Package Gnumed :: Package pycommon :: Module gmPG2
[frames] | [no frames]

Source Code for Module Gnumed.pycommon.gmPG2

   1  """GNUmed PostgreSQL connection handling. 
   2   
   3  TODO: iterator/generator batch fetching: 
   4          - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad 
   5          - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4" 
   6   
   7  winner: 
   8  def resultset_functional_batchgenerator(cursor, size=100): 
   9          for results in iter(lambda: cursor.fetchmany(size), []): 
  10                  for rec in results: 
  11                          yield rec 
  12  """ 
  13  # ======================================================================= 
  14  __version__ = "$Revision: 1.127 $" 
  15  __author__  = "K.Hilbert <Karsten.Hilbert@gmx.net>" 
  16  __license__ = 'GPL v2 or later (details at http://www.gnu.org)' 
  17   
  18  ### imports ### 
  19  # stdlib 
  20  import time, locale, sys, re as regex, os, codecs, types, datetime as pydt, logging 
  21   
  22   
  23  # GNUmed 
  24  if __name__ == '__main__': 
  25          sys.path.insert(0, '../../') 
  26  from Gnumed.pycommon import gmLoginInfo, gmExceptions, gmDateTime, gmBorg, gmI18N, gmLog2 
  27  from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character 
  28   
  29  _log = logging.getLogger('gm.db') 
  30  _log.info(__version__) 
  31   
  32   
  33  # 3rd party 
  34  try: 
  35          import psycopg2 as dbapi 
  36  except ImportError: 
  37          _log.exception("Python database adapter psycopg2 not found.") 
  38          print "CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server." 
  39          raise 
  40  ### imports ### 
  41   
  42   
# ---------------------------------------------------------------------
# Verify at import time that the installed psycopg2 is capable enough
# for GNUmed - refuse to import this module otherwise.
# ---------------------------------------------------------------------
_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
# DB-API 2.0 is the minimum API level this module is written against
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
# threadsafety > 0: threads may at least share the module itself
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
# all queries in this module use %(name)s (pyformat) placeholders
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
# psycopg2 exposes optional, compiled-in features as flags ('dt',
# 'ext', 'pq3') inside its version string - check for each in turn
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')
  63   
  64  import psycopg2.extras 
  65  import psycopg2.extensions 
  66  import psycopg2.pool 
  67  import psycopg2.errorcodes as sql_error_codes 
  68   
  69  # ======================================================================= 
# default client (wire) encoding applied to new connections
_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

# things timezone
_default_client_timezone = None			# default time zone for connections
_sql_set_timezone = None			# SQL used to switch a connection to that zone
_timestamp_template = "cast('%s' as timestamp with time zone)"		# MUST NOT be unicode or else getquoted will not work
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

# default connection parameters, lazily established on first use
_default_dsn = None
_default_login = None

# server version, filled in elsewhere after the first connection
postgresql_version_string = None
postgresql_version = None			# accuracy: major.minor

# read-only connection pool placeholder (managed elsewhere in this module)
__ro_conn_pool = None

# whether missing login parameters may be requested interactively
auto_request_login_params = True
  88  # ======================================================================= 
  89  # global data 
  90  # ======================================================================= 
  91   
# md5 hash of the concatenated table structure per released
# database schema version (forward map: version -> hash)
known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',		# ... old (v1) style hashes
	6: '90e2026ac2efd236da9c8608b8685b2d',		# new (v2) style hashes ...
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161'
}

# reverse map: structure hash -> schema version
# (must be kept in sync with known_schema_hashes above)
map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16
}

# which database schema version each client release branch requires
map_client_branch2required_db_version = {
	u'GIT tree': 0,
	u'0.3': 9,
	u'0.4': 10,
	u'0.5': 11,
	u'0.6': 12,
	u'0.7': 13,
	u'0.8': 14,
	u'0.9': 15,
	u'1.0': 16,
	u'1.1': 16
}
 141   
# get columns and data types for a given table
# (parameters: schema name, table name - positional %s style)
query_table_col_defs = u"""select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

# get column names only for a given table
# (parameters: schema name, table name - positional %s style)
query_table_attributes = u"""select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""
 165   
 166  # ======================================================================= 
 167  # module globals API 
 168  # ======================================================================= 
def set_default_client_encoding(encoding = None):
	"""Remember <encoding> as the default client (wire) encoding.

	The encoding must be known to psycopg2 (else ValueError is
	raised) and decodable by Python's <codecs> module (else
	LookupError is raised).

	Returns True on success.
	"""
	# psycopg2 must know how to map this wire encoding ...
	try:
		py_enc = psycopg2.extensions.encodings[encoding]
	except KeyError:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# ... and Python must actually be able to decode the
	# corresponding Python-side codec
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise
	# FIXME: check encoding against the database
	# FIXME: - but we may not yet have access
	# FIXME: - psycopg2 will pull its encodings from the database eventually
	# it seems safe to set it
	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, str(encoding)))
	_default_client_encoding = encoding
	return True
188 #---------------------------------------------------
def set_default_client_timezone(timezone = None):
	"""Remember <timezone> as the default client time zone.

	New connections will be switched to this zone via
	"set timezone to ...". The value is NOT validated here.

	Returns True.
	"""
	# FIXME: use __validate
	global _default_client_timezone
	global _sql_set_timezone
	_log.info('setting default client time zone from [%s] to [%s]' % (_default_client_timezone, timezone))
	_default_client_timezone = timezone
	_sql_set_timezone = u'set timezone to %s'
	return True
200 #---------------------------------------------------
def __validate_timezone(conn=None, timezone=None):
	"""Check whether <timezone> is both settable AND usable on <conn>.

	Runs inside a throwaway transaction which is rolled back
	afterwards so the connection state is left untouched.

	Returns True/False.
	"""
	_log.debug(u'validating time zone [%s]', timezone)

	cmd = u'set timezone to %(tz)s'
	args = {u'tz': timezone}

	# start from a clean transaction
	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info(u'time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = u"""select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info(u'time zone [%s] is usable', timezone)
			is_valid = True
		except:
			# deliberately broad: any failure means "not usable"
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning(u'time zone [%s] is not settable', timezone)
	except:
		# deliberately broad: any other failure also means "not settable"
		_log.error(u'failed to set time zone to [%s]', timezone)
		_log.exception(u'')

	curs.close()
	# discard whatever the probe did
	conn.rollback()

	return is_valid
233 #---------------------------------------------------
def __expand_timezone(conn=None, timezone=None):
	"""some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations

	Looks the abbreviation up in pg_timezone_names, preferring
	region/city style names and excluding Etc/* zones. Returns
	the input unchanged when no expansion is found or the
	lookup fails.
	"""
	cmd = u"""
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {u'tz': timezone}

	# start from a clean transaction
	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0][0]
			_log.debug(u'[%s] maps to [%s]', timezone, result)
	except:
		# best-effort: fall back to the unexpanded input
		_log.exception(u'cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result
265 #---------------------------------------------------
def __detect_client_timezone(conn=None):
	"""This is run on the very first connection.

	Detects the client time zone: candidates are taken from $TZ
	and from gmDateTime's detected local zone name, each also
	tried in its expanded (full-name) form. The first candidate
	that validates wins; otherwise the numeric offset string
	from gmDateTime is used with an interval-style SET command.

	Sets the module globals _default_client_timezone and
	_sql_set_timezone.
	"""
	# FIXME: check whether server.timezone is the same
	# FIXME: value as what we eventually detect

	# we need gmDateTime to be initialized
	if gmDateTime.current_local_iso_numeric_timezone_string is None:
		gmDateTime.init()

	_log.debug('trying to detect timezone from system')

	tz_candidates = []
	try:
		# $TZ may be in any encoding - decode with the system encoding
		tz = os.environ['TZ'].decode(gmI18N.get_encoding(), 'replace')
		tz_candidates.append(tz)
		expanded = __expand_timezone(conn = conn, timezone = tz)
		if expanded != tz:
			tz_candidates.append(expanded)
	except KeyError:
		# no $TZ in the environment
		pass

	tz_candidates.append(gmDateTime.current_local_timezone_name)
	expanded = __expand_timezone(conn = conn, timezone = gmDateTime.current_local_timezone_name)
	if expanded != gmDateTime.current_local_timezone_name:
		tz_candidates.append(expanded)

	_log.debug('candidates: %s', str(tz_candidates))

	# find best among candidates
	global _default_client_timezone
	global _sql_set_timezone
	found = False
	for tz in tz_candidates:
		if __validate_timezone(conn = conn, timezone = tz):
			_default_client_timezone = tz
			_sql_set_timezone = u'set timezone to %s'
			found = True
			break

	if not found:
		# fall back to the numeric offset string provided by gmDateTime
		_default_client_timezone = gmDateTime.current_local_iso_numeric_timezone_string
		_sql_set_timezone = u"set time zone interval %s hour to minute"

	_log.info('client system time zone detected as equivalent to [%s]', _default_client_timezone)
311 # ======================================================================= 312 # login API 313 # =======================================================================
def __request_login_params_tui():
	"""Text mode request of database login parameters.

	Returns a gmLoginInfo.LoginInfo instance.

	Raises gmExceptions.ConnectionError if the user cancels
	(Ctrl-C) the dialog.
	"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print "\nPlease enter the required login parameters:"
	try:
		# empty host means a non-TCP/IP (local socket) connection
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = 'gnumed_v17')
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		# getpass suppresses echoing of the password
		login.password = getpass.getpass(tmp)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print "user cancelled text mode login dialog"
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login
333 #---------------------------------------------------
def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object

	Raises gmExceptions.NoGuiError if wxPython has no running
	App instance and gmExceptions.ConnectionError if the user
	cancelled the dialog.
	"""
	import wx
	# OK, wxPython was already loaded. But has the main Application instance
	# been initialized yet ? if not, the exception will kick us out
	if wx.GetApp() is None:
		raise gmExceptions.NoGuiError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# Let's launch the login dialog
	# if wx was not initialized /no main App loop, an exception should be raised anyway
	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	#if user cancelled or something else went wrong, raise an exception
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	return login
358 #---------------------------------------------------
def request_login_params():
	"""Request login parameters for database connection.

	Tries the wxPython GUI dialog first (only when DISPLAY is
	set, since wxGTK crashes hard at C level when it cannot
	open the display) and falls back to text mode input.

	Returns a gmLoginInfo.LoginInfo instance.

	Raises Exception when interactive requests are disabled
	via the module global auto_request_login_params.
	"""
	# do we auto-request parameters at all ?
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ?
	# (if we aren't wxGTK will crash hard at
	#  C-level with "can't open Display")
	# note: dict.has_key() is deprecated, use "in"
	if 'DISPLAY' in os.environ:
		# try wxPython GUI
		try:
			return __request_login_params_gui_wx()
		except Exception:
			# GUI unavailable/unusable - fall through to text mode;
			# narrowed from a bare "except:" so that KeyboardInterrupt
			# and SystemExit are no longer silently swallowed
			_log.exception('cannot request login parameters via wxPython GUI')

	# well, either we are on the console or
	# wxPython does not work, use text mode
	return __request_login_params_tui()
376 377 # ======================================================================= 378 # DSN API 379 # -----------------------------------------------------------------------
def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	"""Assemble a psycopg2/libpq DSN string from the given parts.

	Parts which are None or (stringify to) empty/whitespace are
	omitted. "sslmode=prefer" is always appended.
	"""
	fields = [
		('dbname', database),
		('host', host),
		('port', port),
		('user', user),
		('password', password)
	]
	dsn_parts = []
	for key, value in fields:
		if value is None:
			continue
		if str(value).strip() == '':
			continue
		dsn_parts.append('%s=%s' % (key, value))
	# always prefer SSL if the server offers it
	dsn_parts.append('sslmode=prefer')
	return ' '.join(dsn_parts)
401 # ------------------------------------------------------
def get_default_login():
	"""Return the default login, establishing it first if needed."""
	# make sure we do have a login (may ask the user)
	get_default_dsn()
	return _default_login
406 # ------------------------------------------------------
def get_default_dsn():
	"""Return the default DSN, requesting login parameters on first use."""
	global _default_dsn
	if _default_dsn is None:
		# not yet established - ask for login parameters
		# (set_default_login() derives and stores the DSN)
		set_default_login(login = request_login_params())
	return _default_dsn
416 # ------------------------------------------------------
def set_default_login(login=None):
	"""Set the default login and derive the default DSN from it.

	login: a gmLoginInfo.LoginInfo instance, None is ignored.

	Returns False for login=None, True otherwise.
	"""
	if login is None:
		return False

	# an empty host means "non-TCP/IP connection"
	if login.host is not None:
		if login.host.strip() == u'':
			login.host = None

	global _default_login
	# BUGFIX: log BEFORE overwriting the global, otherwise the
	# "from" and "to" values in the log are always identical
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	# mask passwords before they hit the log
	if _default_dsn is None:
		old_dsn = u'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, _default_dsn)
	_log.info ('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', u'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True
443 # ======================================================================= 444 # netadata API 445 # =======================================================================
def database_schema_compatible(link_obj=None, version=None, verbose=True):
	"""Check whether the connected database schema matches <version>.

	Compares the md5 hash of the concatenated table structure
	against the hash known for <version>. On mismatch, expected
	and detected versions are logged; with verbose=True a full
	schema dump and the revision history are logged as well.

	Returns True/False.
	"""
	expected_hash = known_schema_hashes[version]
	# version 0 (development tree) is selected by passing 9999
	# NOTE(review): presumably a server-side convention of
	# gm.concat_table_structure() - confirm against its source
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj=link_obj).split():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj=link_obj):
				_log.debug(u' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True
473 #------------------------------------------------------------------------
def get_schema_version(link_obj=None):
	"""Return the schema version derived from the DB structure hash.

	Returns a descriptive unicode string for unknown hashes.
	"""
	rows, idx = run_ro_queries(link_obj=link_obj, queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}])
	md5_sum = rows[0]['md5']
	try:
		return map_schema_hash2version[md5_sum]
	except KeyError:
		return u'unknown database schema version, MD5 hash is [%s]' % md5_sum
480 #------------------------------------------------------------------------
def get_schema_structure(link_obj=None):
	"""Return the concatenated schema structure as one string."""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': u'select gm.concat_table_structure()'}]
	)
	return rows[0][0]
484 #------------------------------------------------------------------------
def get_schema_hash(link_obj=None):
	"""Return the md5 hash of the concatenated schema structure."""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': u'select md5(gm.concat_table_structure()) as md5'}]
	)
	return rows[0]['md5']
488 #------------------------------------------------------------------------
def get_schema_revision_history(link_obj=None):
	"""Return (imported, version, filename) rows, oldest first."""
	cmd = u"""
select
	imported::text,
	version,
	filename
from gm.schema_revision
order by imported
"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows
500 #------------------------------------------------------------------------
def get_current_user():
	"""Return the database account currently connected as."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select CURRENT_USER'}])
	return rows[0][0]
504 #------------------------------------------------------------------------
def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.

	Returns rows with referenced schema/table/column and the
	referencing table/column(s), derived from pg_constraint.
	"""
	cmd = u"""
select
	%(schema)s as referenced_schema,
	%(tbl)s as referenced_table,
	%(col)s as referenced_column,
	pgc.confkey as referenced_column_list,
	pgc.conrelid::regclass as referencing_table,
	pgc.conkey as referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) as referencing_column
from
	pg_constraint pgc
where
	pgc.contype = 'f'
		and
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""

	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows
556 #------------------------------------------------------------------------
def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>.

	Children are found via pg_inherits, returned as rows of
	(namespace, table).
	"""
	cmd = u"""
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	args = {'schema': schema, 'table': table}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows
578 #------------------------------------------------------------------------
def schema_exists(link_obj=None, schema=u'gm'):
	"""Return whether the schema <schema> exists in the database."""
	cmd = u"""SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': {'schema': schema}}]
	)
	return rows[0][0]
584 #------------------------------------------------------------------------
def table_exists(link_obj=None, schema=None, table=None):
	"""Return whether base table <schema>.<table> exists (True/False)."""
	cmd = u"""
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': (schema, table)}]
	)
	return rows[0][0]
597 #------------------------------------------------------------------------
def get_col_indices(cursor = None):
	"""Return a mapping of column name -> result column index.

	Returns None when the cursor carries no result description
	(unused cursor, or last query did not select rows).

	A query like "select 1,2;" returns two columns of the same
	name - subsequent duplicates are disambiguated by appending
	their column index. Note, however, that dict-style access
	won't work on results of such queries.
	"""
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	for col_index, col_desc in enumerate(cursor.description):
		col_name = col_desc[0]
		# dict.has_key() is deprecated (and gone in Python 3)
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index

	return col_indices
615 #------------------------------------------------------------------------
def get_col_defs(link_obj=None, schema='public', table=None):
	"""Return [list of column names, {column name: data type}].

	Array types (udt names starting with '_') are mapped to
	'<type>[]'.
	"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		name = row[0]
		udt = row[1]
		col_names.append(name)
		# map array types
		if udt.startswith('_'):
			col_type[name] = udt[1:] + '[]'
		else:
			col_type[name] = udt
	return [col_names, col_type]
631 #------------------------------------------------------------------------
def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column attributes of table"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	return [ row[0] for row in rows ]
639 640 #------------------------------------------------------------------------ 641 # i18n functions 642 #------------------------------------------------------------------------
def export_translations_from_database(filename=None):
	"""Export all database string translations into an SQL script.

	The script re-imports the strings via i18n.upd_tx() and is
	meant to be mailed to the developers. Written UTF-8 encoded.

	Returns True.
	"""
	tx_file = codecs.open(filename, 'wb', 'utf8')
	tx_file.write(u'-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write(u'-- - contains translations for each of [%s]\n' % u', '.join(get_translation_languages()))
	tx_file.write(u'-- - user language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write(u'-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write(u'-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write(u'set default_transaction_read_only to off;\n\n')
	tx_file.write(u"set client_encoding to 'utf-8';\n\n")
	# don't let a single failing statement abort the whole import
	tx_file.write(u'\\unset ON_ERROR_STOP\n\n')

	cmd = u'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		# escape single quotes for the E'...' literals
		line = u"select i18n.upd_tx(quote_literal(E'%s'), quote_literal(E'%s'), quote_literal(E'%s'));\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write(u'\n')

	tx_file.write(u'\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True
669 #------------------------------------------------------------------------
def delete_translation_from_database(link_obj=None, language=None, original=None):
	"""Delete the translation of <original> for <language>.

	Always returns True.
	"""
	run_rw_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': u'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s',
			'args': {'lang': language, 'orig': original}
		}],
		return_data = False,
		end_tx = True
	)
	return True
675 676 #------------------------------------------------------------------------
def update_translation_in_database(language=None, original=None, translation=None):
	"""Insert/update the translation of <original> for <language>.

	Returns the argument dict that was passed to the query.
	"""
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries (
		queries = [{'cmd': u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)', 'args': args}],
		return_data = False
	)
	return args
682 683 #------------------------------------------------------------------------
def get_translation_languages():
	"""Return the list of languages translations exist for."""
	cmd = u'select distinct lang from i18n.translations'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return [ row[0] for row in rows ]
689 690 #------------------------------------------------------------------------
def get_database_translations(language=None, order_by=None):
	"""Retrieve translatable strings, and translations, from the DB.

	language = None: return strings for all languages
	order_by = None: order by "lang, orig" (the default)

	Returns rows of (lang, orig, trans), or None on failure of
	the underlying query.
	"""
	args = {'lang': language}
	_log.debug('language [%s]', language)

	# BUGFIX: this condition was inverted - it used to produce
	# "ORDER BY None" when no ordering was given and to discard
	# any explicitly requested ordering
	if order_by is None:
		order_by = u'ORDER BY lang, orig'
	else:
		order_by = u'ORDER BY %s' % order_by

	if language is None:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = u"""
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows
762 763 #------------------------------------------------------------------------
def get_current_user_language():
	"""Return the database language of the current user."""
	rows, idx = run_ro_queries(queries = [{'cmd': u'select i18n.get_curr_lang()'}])
	return rows[0][0]
768 769 #------------------------------------------------------------------------
def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset

	Returns the boolean result of the i18n.*_curr_lang() call
	(False means the language could not be set, e.g. because
	no translations exist for it).
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': u'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': u'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		# unset_curr_lang() returns void, append a query
		# so rows[0][0] below is well-defined
		queries.append({'cmd': u'select True'})
	else:
		if user is None:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': u'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]
801 #------------------------------------------------------------------------
def force_user_language(language=None):
	"""Set the user language in the database.

	- regardless of whether there is any translation available
	- only for the current user
	"""
	_log.info('forcing database language for current db user to [%s]', language)
	run_rw_queries (
		queries = [{
			'cmd': u'select i18n.force_curr_lang(%(lang)s)',
			'args': {'lang': language}
		}]
	)
814 #------------------------------------------------------------------------ 815 #------------------------------------------------------------------------ 816 text_expansion_keywords = None 817
def get_text_expansion_keywords():
	"""Return all text expansion keywords, cached after first use."""
	global text_expansion_keywords
	if text_expansion_keywords is None:
		cmd = u"""select keyword, public_expansion, private_expansion, owner from clin.v_keyword_expansions"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
		text_expansion_keywords = rows
		_log.info('retrieved %s text expansion keywords', len(text_expansion_keywords))
	return text_expansion_keywords
830 #------------------------------------------------------------------------
def expand_keyword(keyword = None):
	"""Return the expansion for <keyword>, or None if there is none."""
	# Easter Egg ;-)
	if keyword == u'$$steffi':
		return u'Hai, play ! Versucht das ! (Keks dazu ?) :-)'

	cmd = u"""select expansion from clin.v_your_keyword_expansions where keyword = %(kwd)s"""
	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) > 0:
		return rows[0]['expansion']
	return None
844 #------------------------------------------------------------------------
def get_keyword_expansion_candidates(keyword = None):
	"""Return known expansion keywords starting with <keyword>.

	Returns [] for keyword = None.
	"""
	if keyword is None:
		return []

	# use the returned list instead of relying on the module-level
	# cache variable which the call happens to (re)fill - more
	# robust should the caching strategy ever change
	keywords = get_text_expansion_keywords()
	return [ kwd['keyword'] for kwd in keywords if kwd['keyword'].startswith(keyword) ]
858 #------------------------------------------------------------------------
def add_text_expansion(keyword=None, expansion=None, public=None):
	"""Add a text expansion for <keyword>.

	public = True: create a public expansion, otherwise a
	private one owned by the current staff member.

	Returns False when an expansion for <keyword> already
	exists in the respective scope, True otherwise. Invalidates
	the module-level keyword cache.
	"""
	# already there ?
	if public:
		cmd = u"SELECT 1 from clin.v_keyword_expansions where public_expansion is true and keyword = %(kwd)s"
	else:
		cmd = u"SELECT 1 from clin.v_your_keyword_expansions where private_expansion is true and keyword = %(kwd)s"

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])
	if len(rows) != 0:
		return False

	# public expansions carry fk_staff = NULL
	if public:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, null)"""
	else:
		cmd = u"""
insert into clin.keyword_expansion (keyword, expansion, fk_staff)
values (%(kwd)s, %(exp)s, (SELECT pk from dem.staff where db_user = current_user))"""

	rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword, 'exp': expansion}}])

	# invalidate the keyword cache
	global text_expansion_keywords
	text_expansion_keywords = None

	return True
885 #------------------------------------------------------------------------
def delete_text_expansion(keyword):
    """Delete the expansion for <keyword> owned by the current user
    (or an ownerless one created by the current user)."""
    cmd = u"""
delete from clin.keyword_expansion where
	keyword = %(kwd)s and (
		(fk_staff = (SELECT pk from dem.staff where db_user = current_user))
			or
		(fk_staff is null and owner = current_user)
	)"""
    rows, idx = run_rw_queries(queries = [{'cmd': cmd, 'args': {'kwd': keyword}}])

    # invalidate the module level keyword cache
    global text_expansion_keywords
    text_expansion_keywords = None
898 #------------------------------------------------------------------------
def edit_text_expansion(keyword, expansion):
    """Set the current user's private expansion for <keyword>.

    Implemented as DELETE + INSERT (run as one transaction by
    run_rw_queries()) rather than as an UPDATE.
    """
    cmd1 = u"""
DELETE FROM clin.keyword_expansion
WHERE
	keyword = %(kwd)s
	AND
	fk_staff = (SELECT pk FROM dem.staff WHERE db_user = current_user)"""

    cmd2 = u"""
INSERT INTO clin.keyword_expansion (
	keyword, expansion, fk_staff
) VALUES (
	%(kwd)s,
	%(exp)s,
	(SELECT pk FROM dem.staff WHERE db_user = current_user)
)"""
    args = {'kwd': keyword, 'exp': expansion}
    rows, idx = run_rw_queries(queries = [
        {'cmd': cmd1, 'args': args},
        {'cmd': cmd2, 'args': args},
    ])

    # invalidate the module level keyword cache
    global text_expansion_keywords
    text_expansion_keywords = None
924 # ======================================================================= 925 # query runners and helpers 926 # =======================================================================
def send_maintenance_notification():
    """Signal connected clients that database maintenance is imminent."""
    run_rw_queries (
        queries = [{'cmd': u'notify "db_maintenance_warning:"'}],
        return_data = False
    )
930 #------------------------------------------------------------------------
def send_maintenance_shutdown():
    """Signal connected clients to disconnect for database maintenance."""
    run_rw_queries (
        queries = [{'cmd': u'notify "db_maintenance_disconnect:"'}],
        return_data = False
    )
934 #------------------------------------------------------------------------
def is_pg_interval(candidate=None):
    """Check whether <candidate> can be cast to a PostgreSQL interval.

    Tries a direct cast first and falls back to casting via ::text
    (some values are only accepted by PostgreSQL as strings).

    Returns True/False.
    """
    cmd = u'SELECT %(candidate)s::interval'
    try:
        rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
        return True
    except Exception:
        # narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed here; direct cast failed, retry via text
        cmd = u'SELECT %(candidate)s::text::interval'
        try:
            rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
            return True
        except Exception:
            return False
947 #------------------------------------------------------------------------
def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None):
    """Retrieve bytea data and write it into <filename>.

    Thin wrapper around bytea2file_object(), see there for
    details on the arguments. Returns its result.
    """
    # open() rather than the deprecated file() builtin
    outfile = open(filename, 'wb')
    try:
        result = bytea2file_object(data_query=data_query, file_obj=outfile, chunk_size=chunk_size, data_size=data_size, data_size_query=data_size_query)
    finally:
        # previously the handle leaked if the download raised
        outfile.close()
    return result
953 #------------------------------------------------------------------------
def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None):
    """Store data from a bytea field into a file.

    <data_query>
    - dict {'cmd': ..., 'args': ...}
    - 'cmd' must be unicode containing "... substring(data from %(start)s for %(size)s) ..."
    - 'args' must be a dict
    - must return one row with one field of type bytea
    <file>
    - must be a file like Python object
    <data_size>
    - integer of the total size of the expected data or None
    <data_size_query>
    - dict {'cmd': ..., 'args': ...}
    - cmd must be unicode
    - must return one row with one field with the octet_length() of the data field
    - used only when <data_size> is None

    Returns True on success (including "nothing to do").
    """
    if data_size == 0:
        return True

    # If the client sets an encoding other than the default we
    # will receive encoding-parsed data which isn't the binary
    # content we want. Hence we need to get our own connection.
    # It must be a read-write one so that we don't affect the
    # encoding for other users of the shared read-only
    # connections.
    # Actually, encodings shouldn't be applied to binary data
    # (eg. bytea types) in the first place but that is only
    # reported to be fixed > v7.4.
    # further tests reveal that at least on PG 8.0 this bug still
    # manifests itself
    # (get_raw_connection() always hands out a fresh, unpooled connection)
    conn = get_raw_connection(readonly=True)

    if data_size is None:
        rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
        data_size = rows[0][0]
        if data_size in [None, 0]:
            # nothing to transfer
            conn.rollback()
            return True

    _log.debug('expecting bytea data of size: [%s] bytes' % data_size)
    _log.debug('using chunk size of: [%s] bytes' % chunk_size)

    # chunk size of 0 means "retrieve whole field at once"
    if chunk_size == 0:
        chunk_size = data_size
        _log.debug('chunk size [0] bytes: retrieving all data at once')

    # Windoze sucks: it can't transfer objects of arbitrary size,
    # anyways, we need to split the transfer,
    # however, only possible if postgres >= 7.2
    needed_chunks, remainder = divmod(data_size, chunk_size)
    _log.debug('chunks to retrieve: [%s]' % needed_chunks)
    _log.debug('remainder to retrieve: [%s] bytes' % remainder)

    # try setting "bytea_output"
    # - fails if not necessary
    # - succeeds if necessary
    try:
        run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
    except dbapi.ProgrammingError:
        _log.debug('failed to set bytea_output to "escape", not necessary')

    # retrieve chunks, skipped if data size < chunk size,
    # does this not carry the danger of cutting up multi-byte escape sequences ?
    # no, since bytea is binary,
    # yes, since in bytea there are *some* escaped values, still
    # no, since those are only escaped during *transfer*, not on-disk, hence
    # only complete escape sequences are put on the wire
    for chunk_id in range(needed_chunks):
        # substring() is 1-based, hence the "+ 1"
        chunk_start = (chunk_id * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = chunk_size
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # retrieve remainder
    if remainder > 0:
        chunk_start = (needed_chunks * chunk_size) + 1
        data_query['args']['start'] = chunk_start
        data_query['args']['size'] = remainder
        try:
            rows, idx = run_ro_queries(link_obj=conn, queries=[data_query])
        except:
            _log.error('cannot retrieve remaining [%s] bytes' % remainder)
            conn.rollback()
            raise
        # it would be a fatal error to see more than one result as ids are supposed to be unique
        file_obj.write(str(rows[0][0]))

    # read-only work - rollback rather than commit to end the transaction
    conn.rollback()
    return True
1053 #------------------------------------------------------------------------
def file2bytea(query=None, filename=None, args=None, conn=None):
    """Store data from a file into a bytea field.

    The query must:
    - be in unicode
    - contain a format spec identifying the row (eg a primary key)
      matching <args> if it is an UPDATE
    - contain a format spec %(data)s::bytea

    <conn>: optional read-write connection to reuse; a new raw
    connection is created (and closed afterwards) if None.
    """
    # read data from file - open() rather than the deprecated
    # file() builtin, and close even if read() raises
    infile = open(filename, "rb")
    try:
        data_as_byte_string = infile.read()
    finally:
        infile.close()
    if args is None:
        args = {}
    # wrap in buffer() so psycopg2 adapts it as binary rather than text
    args['data'] = buffer(data_as_byte_string)
    del(data_as_byte_string)

    # insert the data
    if conn is None:
        conn = get_raw_connection(readonly=False)
        close_conn = True
    else:
        close_conn = False

    run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = True)

    if close_conn:
        conn.close()

    return
1085 #------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
    """Escape input for use in a PostgreSQL regular expression.

    If a fragment comes from user input and is to be used
    as a regular expression we need to make sure it doesn't
    contain invalid regex patterns such as unbalanced ('s.

    <escape_all>
        True: escape *all* POSIX regex metacharacters
        False: only escape those which render the regex invalid
    """
    if escape_all:
        # previously <escape_all> was documented but silently ignored;
        # honor it: escape the backslash first lest it double up the
        # escapes added for the other metacharacters below
        sanitized = expression.replace('\\', '\\\\')
        for char in u'()[]{}.+*?^$|':
            sanitized = sanitized.replace(char, u'\\' + char)
        return sanitized

    return expression.replace (
        '(', '\\('
    ).replace (
        ')', '\\)'
    ).replace (
        '[', '\\['
    ).replace (
        '+', '\\+'
    ).replace (
        '.', '\\.'
    ).replace (
        '*', '\\*'
    )
    #']', '\]',        # not needed
1110 #']', '\]', # not needed 1111 #------------------------------------------------------------------------
def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
    """Run read-only queries.

    <link_obj>
    - a cursor, a connection, or None (in which case a pooled
      read-only connection is used)

    <queries> must be a list of dicts:
    [
        {'cmd': <string>, 'args': <dict> or <tuple>},
        {...},
        ...
    ]

    <return_data>: if true, return the rows fetched by the last query
    <get_col_idx>: if true, also return a field-name -> column-position map

    Returns a tuple (data, col_idx).
    """
    # set up cursor/close/rollback callables depending on what kind
    # of link_obj we were handed - caller-owned objects get no-ops so
    # we never close/rollback what we do not own
    if isinstance(link_obj, dbapi._psycopg.cursor):
        curs = link_obj
        curs_close = __noop
        tx_rollback = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        curs = link_obj.cursor()
        curs_close = curs.close
        tx_rollback = link_obj.rollback
    elif link_obj is None:
        conn = get_connection(readonly=True, verbose=verbose)
        curs = conn.cursor()
        curs_close = curs.close
        tx_rollback = conn.rollback
    else:
        raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

    if verbose:
        _log.debug('cursor: %s', curs)

    for query in queries:
        # all queries are expected to be unicode - warn loudly if not
        if type(query['cmd']) is not types.UnicodeType:
            print "run_ro_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
            if verbose:
                _log.debug('ran query: [%s]', curs.query)
                _log.debug('PG status message: %s', curs.statusmessage)
                _log.debug('cursor description: %s', str(curs.description))
        except:
            # FIXME: use .pgcode
            try:
                curs_close()
            except dbapi.InterfaceError:
                _log.exception('cannot close cursor')
            tx_rollback()        # need to rollback so ABORT state isn't preserved in pooled conns
            _log.error('query failed: [%s]', curs.query)
            _log.error('PG status message: %s', curs.statusmessage)
            raise

    data = None
    col_idx = None
    if return_data:
        data = curs.fetchall()
        if verbose:
            _log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
            _log.debug('cursor description: %s', str(curs.description))
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    tx_rollback()        # rollback just so that we don't stay IDLE IN TRANSACTION forever

    return (data, col_idx)
1179 #------------------------------------------------------------------------
def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
    """Convenience function for running a transaction
    that is supposed to get committed.

    <link_obj>
        can be either:
        - a cursor
        - a connection

    <queries>
        is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>)
        to be executed as a single transaction, the last
        query may usefully return rows (such as a
        "SELECT currval('some_sequence')" statement)

    <end_tx>
        - controls whether the transaction is finalized (eg.
          committed/rolled back) or not, this allows the
          call to run_rw_queries() to be part of a framing
          transaction
        - if link_obj is a connection then <end_tx> will
          default to False unless it is explicitly set to
          True which is taken to mean "yes, you do have full
          control over the transaction" in which case the
          transaction is properly finalized
        - if link_obj is a cursor we CANNOT finalize the
          transaction because we would need the connection for that
        - if link_obj is None <end_tx> will, of course, always be True

    <return_data>
        - if true, the returned data will include the rows
          the last query selected
        - if false, it returns None instead

    <get_col_idx>
        - if true, the returned data will include a dictionary
          mapping field names to column positions
        - if false, the returned data returns None instead

    method result:
        - returns a tuple (data, idx)
        - <data>:
            * (None, None) if last query did not return rows
            * ("fetchall() result", <index>) if last query returned any rows
            * for <index> see <get_col_idx>
    """
    # set up cursor and transaction-control callables depending on
    # the type of <link_obj> - no-ops where we do not own the object
    if isinstance(link_obj, dbapi._psycopg.cursor):
        conn_close = __noop
        conn_commit = __noop
        conn_rollback = __noop
        curs = link_obj
        curs_close = __noop
    elif isinstance(link_obj, dbapi._psycopg.connection):
        conn_close = __noop
        if end_tx:
            conn_commit = link_obj.commit
            conn_rollback = link_obj.rollback
        else:
            conn_commit = __noop
            conn_rollback = __noop
        curs = link_obj.cursor()
        curs_close = curs.close
    elif link_obj is None:
        conn = get_connection(readonly=False)
        conn_close = conn.close
        conn_commit = conn.commit
        conn_rollback = conn.rollback
        curs = conn.cursor()
        curs_close = curs.close
    else:
        raise ValueError('link_obj must be cursor, connection or None and not [%s]' % link_obj)

    for query in queries:
        # all queries are expected to be unicode - warn loudly if not
        if type(query['cmd']) is not types.UnicodeType:
            print "run_rw_queries(): non-unicode query"
            print query['cmd']
        try:
            args = query['args']
        except KeyError:
            args = None
        try:
            curs.execute(query['cmd'], args)
        except:
            _log.exception('error running RW query')
            gmLog2.log_stack_trace()
            # clean up as best we can, then re-raise the original error
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise

    data = None
    col_idx = None
    if return_data:
        try:
            data = curs.fetchall()
        except:
            _log.exception('error fetching data from RW query')
            gmLog2.log_stack_trace()
            try:
                curs_close()
                conn_rollback()
                conn_close()
            except dbapi.InterfaceError:
                _log.exception('cannot cleanup')
                raise
            raise
        if get_col_idx:
            col_idx = get_col_indices(curs)

    curs_close()
    conn_commit()
    conn_close()

    return (data, col_idx)
1298 #------------------------------------------------------------------------
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
    """Generates SQL for an INSERT query.

    values: dict of values keyed by field to insert them into
    returning: list of field names to RETURN, or None

    NOTE(review): <schema> and <table> are interpolated directly into
    the SQL - callers must only ever pass trusted identifiers here;
    the values themselves are passed as bound parameters.
    """
    if schema is None:
        schema = u'public'

    fields = values.keys()        # that way val_snippets and fields really should end up in the same order
    val_snippets = []
    for field in fields:
        # "%%(...)s" keeps a literal "%(field)s" placeholder for psycopg2
        val_snippets.append(u'%%(%s)s' % field)

    if returning is None:
        returning = u''
        return_data = False
    else:
        returning = u'\n\tRETURNING\n\t\t%s' % u', '.join(returning)
        return_data = True

    cmd = u"""\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
        schema,
        table,
        u',\n\t\t'.join(fields),
        u',\n\t\t'.join(val_snippets),
        returning
    )

    _log.debug(u'running SQL: >>>%s<<<', cmd)

    return run_rw_queries (
        link_obj = link_obj,
        queries = [{'cmd': cmd, 'args': values}],
        end_tx = end_tx,
        return_data = return_data,
        get_col_idx = get_col_idx,
        verbose = verbose
    )
1341 # ======================================================================= 1342 # connection handling API 1343 # -----------------------------------------------------------------------
class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
    """
    GNUmed database connection pool.

    Extends psycopg2's PersistentConnectionPool with
    a custom _connect() function. Supports one connection
    per thread - which also ties it to one particular DSN.
    """
    #--------------------------------------------------
    def _connect(self, key=None):
        """Create (and register) one pooled, read-only connection.

        close() is monkey-patched to raise so pooled read-only
        connections cannot be closed by accident - shutdown()
        uses the saved original_close() instead.
        """
        conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly=True)

        conn.original_close = conn.close
        conn.close = _raise_exception_on_ro_conn_close

        # pool bookkeeping as expected by the psycopg2 base class
        if key is not None:
            self._used[key] = conn
            self._rused[id(conn)] = key
        else:
            self._pool.append(conn)

        return conn
    #--------------------------------------------------
    def shutdown(self):
        """Really close all pooled connections (via original_close())."""
        for conn_key in self._used.keys():
            _log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, self._used[conn_key].get_backend_pid())
            self._used[conn_key].original_close()
1372 # -----------------------------------------------------------------------
def get_raw_connection(dsn=None, verbose=False, readonly=True):
    """Get a raw, unadorned connection.

    - this will not set any parameters such as encoding, timezone, datestyle
    - the only requirement is a valid DSN
    - hence it can be used for "service" connections
      for verifying encodings etc
    """
    # FIXME: support verbose
    if dsn is None:
        dsn = get_default_dsn()

    if u'host=salaam.homeunix' in dsn:
        raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

    try:
        conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
    except dbapi.OperationalError, e:

        t, v, tb = sys.exc_info()
        try:
            msg = e.args[0]
        except (AttributeError, IndexError, TypeError):
            raise

        msg = unicode(msg, gmI18N.get_encoding(), 'replace')

        # map the well-known authentication failure modes onto
        # cAuthenticationError so callers can handle them uniformly,
        # re-raising with the original traceback
        if msg.find('fe_sendauth') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        if regex.search('user ".*" does not exist', msg) is not None:
            raise cAuthenticationError, (dsn, msg), tb

        if msg.find('uthenti') != -1:
            raise cAuthenticationError, (dsn, msg), tb

        raise

    _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)

    # do first-time stuff (once per process)
    global postgresql_version
    if postgresql_version is None:
        curs = conn.cursor()
        curs.execute("""
SELECT
	substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
FROM
	pg_settings
WHERE
	name = 'server_version'
""")
        postgresql_version = curs.fetchone()['version']
        _log.info('PostgreSQL version (numeric): %s' % postgresql_version)
        try:
            curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
            _log.info('database size: %s', curs.fetchone()[0])
        except:
            # best-effort only - logging the db size must never
            # prevent establishing a connection
            pass
        if verbose:
            __log_PG_settings(curs=curs)
        curs.close()
        conn.commit()

    if _default_client_timezone is None:
        __detect_client_timezone(conn = conn)

    curs = conn.cursor()

    # set access mode
    if readonly:
        _log.debug('access mode [READ ONLY]')
        cmd = 'set session characteristics as transaction READ ONLY'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to on'
        curs.execute(cmd)
    else:
        _log.debug('access mode [READ WRITE]')
        cmd = 'set session characteristics as transaction READ WRITE'
        curs.execute(cmd)
        cmd = 'set default_transaction_read_only to off'
        curs.execute(cmd)

    curs.close()
    conn.commit()

    # flag for get_connection(): encoding/timezone/etc not yet applied
    conn.is_decorated = False

    return conn
1462 # =======================================================================
1463 -def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
1464 """Get a new connection. 1465 1466 This assumes the locale system has been initialized 1467 unless an encoding is specified. 1468 """ 1469 # FIXME: support pooled on RW, too 1470 # FIXME: for now, support the default DSN only 1471 if pooled and readonly and (dsn is None): 1472 global __ro_conn_pool 1473 if __ro_conn_pool is None: 1474 __ro_conn_pool = cConnectionPool ( 1475 minconn = 1, 1476 maxconn = 2, 1477 dsn = dsn, 1478 verbose = verbose 1479 ) 1480 conn = __ro_conn_pool.getconn() 1481 else: 1482 conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False) 1483 1484 if conn.is_decorated: 1485 return conn 1486 1487 if encoding is None: 1488 encoding = _default_client_encoding 1489 if encoding is None: 1490 encoding = gmI18N.get_encoding() 1491 _log.warning('client encoding not specified') 1492 _log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding) 1493 _log.warning('for this to work properly the application MUST have called locale.setlocale() before') 1494 1495 # set connection properties 1496 # - client encoding 1497 try: 1498 conn.set_client_encoding(encoding) 1499 except dbapi.OperationalError: 1500 t, v, tb = sys.exc_info() 1501 if str(v).find("can't set encoding to") != -1: 1502 raise cEncodingError, (encoding, v), tb 1503 raise 1504 1505 # - transaction isolation level 1506 if readonly: 1507 iso_level = u'read committed' 1508 else: 1509 conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE) 1510 iso_level = u'serializable' 1511 1512 _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone) 1513 1514 curs = conn.cursor() 1515 1516 # - client time zone 1517 curs.execute(_sql_set_timezone, [_default_client_timezone]) 1518 1519 conn.commit() 1520 1521 # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is 1522 # FIXME: PG 8.4, however!) 
or else when psycopg2 supports a workaround 1523 # 1524 # - bytea data format 1525 # PG 9.0 switched to - by default - using "hex" rather than "escape", 1526 # however, psycopg2's linked with a pre-9.0 libpq do assume "escape" 1527 # as the transmission mode for bytea output, 1528 # so try to set this setting back to "escape", 1529 # if that's not possible the reason will be that PG < 9.0 does not support 1530 # that setting - which also means we don't need it and can ignore the 1531 # failure 1532 cmd = "set bytea_output to 'escape'" 1533 try: 1534 curs.execute(cmd) 1535 except dbapi.ProgrammingError: 1536 _log.error('cannot set bytea_output format') 1537 1538 curs.close() 1539 conn.commit() 1540 1541 conn.is_decorated = True 1542 1543 return conn
1544 #-----------------------------------------------------------------------
def shutdown():
    """Close all pooled read-only connections, if a pool was ever created."""
    if __ro_conn_pool is not None:
        __ro_conn_pool.shutdown()
1549 # ====================================================================== 1550 # internal helpers 1551 #-----------------------------------------------------------------------
def __noop():
    # do-nothing placeholder used in place of close/commit/rollback
    # callables when the caller owns the cursor/connection
    pass
1554 #-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
    # monkey-patched in place of close() on pooled read-only
    # connections (see cConnectionPool._connect()) so they
    # cannot be closed by accident
    raise TypeError(u'close() called on read-only connection')
1557 #-----------------------------------------------------------------------
def log_database_access(action=None):
    """Write an audit entry for <action> into gm.access_log."""
    entry = {u'user_action': action}
    run_insert (
        schema = u'gm',
        table = u'access_log',
        values = entry,
        end_tx = True
    )
1565 #-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
    """Check server time and local time to be within
    the given tolerance of each other.

    tolerance: seconds

    Returns True when the skew is within tolerance, False otherwise
    (including when the wire roundtrip alone exceeds the tolerance).
    """
    _log.debug('maximum skew tolerance (seconds): %s', tolerance)

    cmd = u"SELECT now() at time zone 'UTC'"
    conn = get_raw_connection(readonly=True)
    curs = conn.cursor()

    # time the query roundtrip so network latency can be told
    # apart from genuine clock skew
    start = time.time()
    rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
    end = time.time()
    client_now_as_utc = pydt.datetime.utcnow()

    curs.close()
    conn.commit()
    # NOTE(review): <conn> is never close()d here, the raw connection
    # is left to garbage collection - confirm this is intended

    server_now_as_utc = rows[0][0]
    query_duration = end - start
    _log.info('server "now" (UTC): %s', server_now_as_utc)
    _log.info('client "now" (UTC): %s', client_now_as_utc)
    _log.debug('wire roundtrip (seconds): %s', query_duration)

    if query_duration > tolerance:
        _log.error('useless to check client/server time skew, wire roundtrip > tolerance')
        return False

    # absolute value of the difference
    if server_now_as_utc > client_now_as_utc:
        real_skew = server_now_as_utc - client_now_as_utc
    else:
        real_skew = client_now_as_utc - server_now_as_utc

    _log.debug('client/server time skew: %s', real_skew)

    if real_skew > pydt.timedelta(seconds = tolerance):
        _log.error('client/server time skew > tolerance')
        return False

    return True
1608 #-----------------------------------------------------------------------
def sanity_check_database_settings():
    """Checks database settings.

    returns (status, message)
    status:
        0: no problem
        1: non-fatal problem
        2: fatal problem
    """
    _log.debug('checking database settings')

    conn = get_connection()

    # - version string (cached at module level)
    global postgresql_version_string
    if postgresql_version_string is None:
        curs = conn.cursor()
        curs.execute('SELECT version()')
        postgresql_version_string = curs.fetchone()['version']
        curs.close()
        _log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

    options2check = {
        # setting: [expected value, risk, fatal?]
        u'allow_system_table_mods': [u'off', u'system breakage', False],
        u'check_function_bodies': [u'on', u'suboptimal error detection', False],
        u'datestyle': [u'ISO', u'faulty timestamp parsing', True],
        u'default_transaction_isolation': [u'read committed', u'faulty database reads', True],
        u'default_transaction_read_only': [u'on', u'accidental database writes', False],
        u'fsync': [u'on', u'data loss/corruption', True],
        u'full_page_writes': [u'on', u'data loss/corruption', False],
        u'lc_messages': [u'C', u'suboptimal error detection', False],
        u'password_encryption': [u'on', u'breach of confidentiality', False],
        u'regex_flavor': [u'advanced', u'query breakage', False],        # 9.0 doesn't support this anymore, default now advanced anyway
        u'synchronous_commit': [u'on', u'data loss/corruption', False],
        u'sql_inheritance': [u'on', u'query breakage, data loss/corruption', True]
    }

    from Gnumed.pycommon import gmCfg2
    _cfg = gmCfg2.gmCfgData()
    if _cfg.get(option = u'hipaa'):
        # HIPAA mode: (dis)connection logging is mandatory
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', True]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', True]
    else:
        # fatal? = None: reported in the log only, never counted as a problem
        options2check[u'log_connections'] = [u'on', u'non-compliance with HIPAA', None]
        options2check[u'log_disconnections'] = [u'on', u'non-compliance with HIPAA', None]

    cmd = u"SELECT name, setting from pg_settings where name in %(settings)s"
    rows, idx = run_ro_queries (
        link_obj = conn,
        queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
        get_col_idx = False
    )

    found_error = False
    found_problem = False
    msg = []
    for row in rows:
        option = row['name']
        value_found = row['setting']
        value_expected = options2check[option][0]
        risk = options2check[option][1]
        fatal_setting = options2check[option][2]
        if value_found != value_expected:
            if fatal_setting is True:
                found_error = True
            elif fatal_setting is False:
                found_problem = True
            elif fatal_setting is None:
                # informational only
                pass
            else:
                _log.error(options2check[option])
                raise ValueError(u'invalid database configuration sanity check')
            msg.append(_(' option [%s]: %s') % (option, value_found))
            msg.append(_(' risk: %s') % risk)
            _log.warning('PG option [%s] set to [%s], expected [%s], risk: <%s>' % (option, value_found, value_expected, risk))

    if found_error:
        return 2, u'\n'.join(msg)

    if found_problem:
        return 1, u'\n'.join(msg)

    return 0, u''
1693 #------------------------------------------------------------------------
def __log_PG_settings(curs=None):
    """Dump all PostgreSQL runtime settings to the log.

    Returns True on success, False otherwise - must never raise,
    since failure to log settings must not prevent connecting.
    """
    # don't use any of the run_*()s since that might
    # create a loop if we fail here
    # FIXME: use pg_settings
    try:
        curs.execute(u'show all')
    except Exception:
        # narrowed from a bare "except:" so KeyboardInterrupt/
        # SystemExit are no longer swallowed here
        _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
        return False
    settings = curs.fetchall()
    if settings is None:
        _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
        return False
    for setting in settings:
        _log.debug(u'PG option [%s]: %s', setting[0], setting[1])
    return True
1710 # =======================================================================
def extract_msg_from_pg_exception(exc=None):
    """Extract the primary message from a psycopg2 exception as unicode.

    Returns a fallback string if no message can be extracted.
    """
    try:
        msg = exc.args[0]
    except (AttributeError, IndexError, TypeError):
        return u'cannot extract message from exception'

    if isinstance(msg, unicode):
        # already unicode - calling unicode(msg, encoding, ...) on it
        # would raise "TypeError: decoding Unicode is not supported"
        return msg
    return unicode(msg, gmI18N.get_encoding(), 'replace')
1719 # =======================================================================
class cAuthenticationError(dbapi.OperationalError):
    """Raised when connecting fails due to an authentication problem.

    dsn: the DSN the connection attempt was made with
    prev_val: the original PostgreSQL error message
    """

    def __init__(self, dsn=None, prev_val=None):
        self.dsn = dsn
        self.prev_val = prev_val

    def __str__(self):
        # byte-string form for py2 str() contexts
        _log.warning('%s.__str__() called', self.__class__.__name__)
        tmp = u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
        _log.error(tmp)
        return tmp.encode(gmI18N.get_encoding(), 'replace')

    def __unicode__(self):
        return u'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)
1734 1735 # ======================================================================= 1736 # custom psycopg2 extensions 1737 # =======================================================================
class cEncodingError(dbapi.OperationalError):
    """Raised when the requested client encoding cannot be set.

    encoding: the encoding which could not be set
    prev_val: the original PostgreSQL error message
    """

    def __init__(self, encoding=None, prev_val=None):
        self.encoding = encoding
        self.prev_val = prev_val

    def __str__(self):
        # byte-string form for py2 str() contexts
        _log.warning('%s.__str__() called', self.__class__.__name__)
        return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val.encode(gmI18N.get_encoding(), 'replace'), self.encoding.encode(gmI18N.get_encoding(), 'replace'))

    def __unicode__(self):
        return u'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)
1750 1751 # ----------------------------------------------------------------------- 1752 # Python -> PostgreSQL 1753 # ----------------------------------------------------------------------- 1754 # test when Squeeze (and thus psycopg2 2.2 becomes Stable
class cAdapterPyDateTime(object):
    """psycopg2 adapter turning datetime.datetime instances into
    SQL timestamp literals (via the module level _timestamp_template)."""

    def __init__(self, dt):
        # naive timestamps are rejected outright - only timezone-aware
        # values can be sent to PostgreSQL unambiguously
        # NOTE(review): the chained %-formatting below first inserts
        # _timestamp_template into [%s], then fills its placeholder
        # with the ISO string - confirm this is intended and not a
        # leftover (a plain "% dt.isoformat()" would read more naturally)
        if dt.tzinfo is None:
            raise ValueError(u'datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
        self.__dt = dt

    def getquoted(self):
        # render the SQL literal psycopg2 will put on the wire
        return _timestamp_template % self.__dt.isoformat()
## remove for 0.9
## ----------------------------------------------------------------------
##class cAdapterMxDateTime(object):
##
##	def __init__(self, dt):
##		if dt.tz == '???':
##			_log.info('[%s]: no time zone string available in (%s), assuming local time zone', self.__class__.__name__, dt)
##		self.__dt = dt
##
##	def getquoted(self):
##		# under some locale settings the mx.DateTime ISO formatter
##		# will insert "," into the ISO string,
##		# while this is allowed per the ISO8601 spec PostgreSQL
##		# cannot currently handle that,
##		# so map those "," to "." to make things work:
##		return mxDT.ISO.str(self.__dt).replace(',', '.')
##
## ----------------------------------------------------------------------
## PostgreSQL -> Python
## ----------------------------------------------------------------------

#=======================================================================
# main
#-----------------------------------------------------------------------

# make sure psycopg2 knows how to handle unicode ...
# intended to become standard
# test when Squeeze (and thus psycopg2 2.2 becomes Stable
# NOTE(review): these statements use the name "psycopg2" while the
# visible import is "import psycopg2 as dbapi" -- presumably a
# "import psycopg2.extensions" exists earlier in the file; confirm.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime types with timestamps when locales are in use
# check in 0.9:
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

## remove for 0.9
#try:
#	import mx.DateTime as mxDT
##	psycopg2.extensions.register_adapter(mxDT.DateTimeType, cAdapterMxDateTime)
#except ImportError:
#	_log.warning('cannot import mx.DateTime')

# do NOT adapt *lists* to "... IN (*) ..." syntax because we want
# them adapted to "... ARRAY[]..." so we can support PG arrays

#=======================================================================
if __name__ == "__main__":

	# only run the test suite when explicitly asked to:
	# python gmPG2.py test [args...]
	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	logging.basicConfig(level=logging.DEBUG)
	#--------------------------------------------------------------------
def test_file2bytea():
	"""Round-trip test: stream the file named in sys.argv[2] into a bytea column.

	The scratch table is dropped again even when the insert fails.
	"""
	run_rw_queries(queries = [
		{'cmd': u'create table test_bytea (data bytea)'}
	])

	cmd = u'insert into test_bytea values (%(data)s::bytea)'
	try:
		file2bytea(query = cmd, filename = sys.argv[2])
	except Exception:
		# was a bare "except:" which would also swallow
		# SystemExit/KeyboardInterrupt -- narrowed on purpose
		_log.exception('error')

	run_rw_queries(queries = [
		{'cmd': u'drop table test_bytea'}
	])
1835 #--------------------------------------------------------------------
1836 - def test_get_connection():
1837 print "testing get_connection()" 1838 1839 dsn = 'foo' 1840 try: 1841 conn = get_connection(dsn=dsn) 1842 except dbapi.OperationalError, e: 1843 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1844 t, v = sys.exc_info()[:2] 1845 print ' ', t 1846 print ' ', v 1847 1848 dsn = 'dbname=gnumed_v9' 1849 try: 1850 conn = get_connection(dsn=dsn) 1851 except cAuthenticationError: 1852 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1853 t, v = sys.exc_info()[:2] 1854 print ' ', t 1855 print ' ', v 1856 1857 dsn = 'dbname=gnumed_v9 user=abc' 1858 try: 1859 conn = get_connection(dsn=dsn) 1860 except cAuthenticationError: 1861 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1862 t, v = sys.exc_info()[:2] 1863 print ' ', t 1864 print ' ', v 1865 1866 dsn = 'dbname=gnumed_v9 user=any-doc' 1867 try: 1868 conn = get_connection(dsn=dsn) 1869 except cAuthenticationError: 1870 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1871 t, v = sys.exc_info()[:2] 1872 print ' ', t 1873 print ' ', v 1874 1875 dsn = 'dbname=gnumed_v9 user=any-doc password=abc' 1876 try: 1877 conn = get_connection(dsn=dsn) 1878 except cAuthenticationError: 1879 print "SUCCESS: get_connection(%s) failed as expected" % dsn 1880 t, v = sys.exc_info()[:2] 1881 print ' ', t 1882 print ' ', v 1883 1884 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1885 conn = get_connection(dsn=dsn, readonly=True) 1886 1887 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1888 conn = get_connection(dsn=dsn, readonly=False) 1889 1890 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1891 encoding = 'foo' 1892 try: 1893 conn = get_connection(dsn=dsn, encoding=encoding) 1894 except cEncodingError: 1895 print "SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding) 1896 t, v = sys.exc_info()[:2] 1897 print ' ', t 1898 print ' ', v
1899 #--------------------------------------------------------------------
1900 - def test_exceptions():
1901 print "testing exceptions" 1902 1903 try: 1904 raise cAuthenticationError('no dsn', 'no previous exception') 1905 except cAuthenticationError: 1906 t, v, tb = sys.exc_info() 1907 print t 1908 print v 1909 print tb 1910 1911 try: 1912 raise cEncodingError('no dsn', 'no previous exception') 1913 except cEncodingError: 1914 t, v, tb = sys.exc_info() 1915 print t 1916 print v 1917 print tb
1918 #--------------------------------------------------------------------
1919 - def test_ro_queries():
1920 print "testing run_ro_queries()" 1921 1922 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc' 1923 conn = get_connection(dsn, readonly=True) 1924 1925 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1926 print data 1927 print idx 1928 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True) 1929 print data 1930 print idx 1931 1932 curs = conn.cursor() 1933 1934 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True) 1935 print data 1936 print idx 1937 1938 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True) 1939 print data 1940 print idx 1941 1942 try: 1943 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': u'selec 1'}], return_data=True, get_col_idx=True, verbose=True) 1944 print data 1945 print idx 1946 except psycopg2.ProgrammingError: 1947 print 'SUCCESS: run_ro_queries("selec 1") failed as expected' 1948 t, v = sys.exc_info()[:2] 1949 print ' ', t 1950 print ' ', v 1951 1952 curs.close()
1953 #--------------------------------------------------------------------
def test_request_dsn():
	"""Request connection parameters interactively, connect, print and close."""
	conn = get_connection()
	print conn
	conn.close()
1958 #--------------------------------------------------------------------
1959 - def test_set_encoding():
1960 print "testing set_default_client_encoding()" 1961 1962 enc = 'foo' 1963 try: 1964 set_default_client_encoding(enc) 1965 print "SUCCESS: encoding [%s] worked" % enc 1966 except ValueError: 1967 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1968 t, v = sys.exc_info()[:2] 1969 print ' ', t 1970 print ' ', v 1971 1972 enc = '' 1973 try: 1974 set_default_client_encoding(enc) 1975 print "SUCCESS: encoding [%s] worked" % enc 1976 except ValueError: 1977 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1978 t, v = sys.exc_info()[:2] 1979 print ' ', t 1980 print ' ', v 1981 1982 enc = 'latin1' 1983 try: 1984 set_default_client_encoding(enc) 1985 print "SUCCESS: encoding [%s] worked" % enc 1986 except ValueError: 1987 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1988 t, v = sys.exc_info()[:2] 1989 print ' ', t 1990 print ' ', v 1991 1992 enc = 'utf8' 1993 try: 1994 set_default_client_encoding(enc) 1995 print "SUCCESS: encoding [%s] worked" % enc 1996 except ValueError: 1997 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 1998 t, v = sys.exc_info()[:2] 1999 print ' ', t 2000 print ' ', v 2001 2002 enc = 'unicode' 2003 try: 2004 set_default_client_encoding(enc) 2005 print "SUCCESS: encoding [%s] worked" % enc 2006 except ValueError: 2007 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2008 t, v = sys.exc_info()[:2] 2009 print ' ', t 2010 print ' ', v 2011 2012 enc = 'UNICODE' 2013 try: 2014 set_default_client_encoding(enc) 2015 print "SUCCESS: encoding [%s] worked" % enc 2016 except ValueError: 2017 print "SUCCESS: set_default_client_encoding(%s) failed as expected" % enc 2018 t, v = sys.exc_info()[:2] 2019 print ' ', t 2020 print ' ', v
2021 #--------------------------------------------------------------------
2022 - def test_connection_pool():
2023 dsn = get_default_dsn() 2024 pool = cConnectionPool(minconn=1, maxconn=2, dsn=None, verbose=False) 2025 print pool 2026 print pool.getconn() 2027 print pool.getconn() 2028 print pool.getconn() 2029 print type(pool.getconn())
2030 #--------------------------------------------------------------------
2031 - def test_list_args():
2032 dsn = get_default_dsn() 2033 conn = get_connection(dsn, readonly=True) 2034 curs = conn.cursor() 2035 curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])
2036 #--------------------------------------------------------------------
2037 - def test_sanitize_pg_regex():
2038 tests = [ 2039 ['(', '\\('] 2040 , ['[', '\\['] 2041 , [')', '\\)'] 2042 ] 2043 for test in tests: 2044 result = sanitize_pg_regex(test[0]) 2045 if result != test[1]: 2046 print 'ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1])
2047 #--------------------------------------------------------------------
2048 - def test_is_pg_interval():
2049 status = True 2050 tests = [ 2051 [None, True], # None == NULL == succeeds ! 2052 [1, True], 2053 ['1', True], 2054 ['abc', False] 2055 ] 2056 2057 if not is_pg_interval(): 2058 print 'ERROR: is_pg_interval() returned "False", expected "True"' 2059 status = False 2060 2061 for test in tests: 2062 result = is_pg_interval(test[0]) 2063 if result != test[1]: 2064 print 'ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]) 2065 status = False 2066 2067 return status
2068 #--------------------------------------------------------------------
def test_sanity_check_time_skew():
	"""Smoke test: compare client and database server clocks for skew."""
	sanity_check_time_skew()
2071 #--------------------------------------------------------------------
2072 - def test_keyword_expansion():
2073 print "keywords, from database:" 2074 print get_text_expansion_keywords() 2075 print "keywords, cached:" 2076 print get_text_expansion_keywords() 2077 print "'$keyword' expands to:" 2078 print expand_keyword(keyword = u'$dvt')
2079 #--------------------------------------------------------------------
2080 - def test_get_foreign_key_details():
2081 for row in get_foreign_keys2column ( 2082 schema = u'dem', 2083 table = u'identity', 2084 column = u'pk' 2085 ): 2086 print '%s.%s references %s.%s.%s' % ( 2087 row['referencing_table'], 2088 row['referencing_column'], 2089 row['referenced_schema'], 2090 row['referenced_table'], 2091 row['referenced_column'] 2092 )
2093 #--------------------------------------------------------------------
2094 - def test_set_user_language():
2095 # (user, language, result, exception type) 2096 tests = [ 2097 # current user 2098 [None, 'de_DE', True], 2099 [None, 'lang_w/o_tx', False], 2100 [None, None, True], 2101 # valid user 2102 ['any-doc', 'de_DE', True], 2103 ['any-doc', 'lang_w/o_tx', False], 2104 ['any-doc', None, True], 2105 # invalid user 2106 ['invalid user', 'de_DE', None], 2107 ['invalid user', 'lang_w/o_tx', False], # lang checking happens before user checking 2108 ['invalid user', None, True] 2109 ] 2110 for test in tests: 2111 try: 2112 result = set_user_language(user = test[0], language = test[1]) 2113 if result != test[2]: 2114 print "test:", test 2115 print "result:", result, "expected:", test[2] 2116 except psycopg2.IntegrityError, e: 2117 if test[2] is None: 2118 continue 2119 print "test:", test 2120 print "expected exception" 2121 print "result:", e
2122 #--------------------------------------------------------------------
2123 - def test_get_schema_revision_history():
2124 for line in get_schema_revision_history(): 2125 print u' - '.join(line)
2126 #--------------------------------------------------------------------
2127 - def test_run_query():
2128 gmDateTime.init() 2129 args = {'dt': gmDateTime.pydt_max_here()} 2130 cmd = u"SELECT %(dt)s" 2131 2132 #cmd = u"SELECT 'infinity'::timestamp with time zone" 2133 rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False) 2134 print rows
2135 #--------------------------------------------------------------------
2136 - def test_schema_exists():
2137 print schema_exists()
2138 2139 #-------------------------------------------------------------------- 2140 # run tests 2141 #test_file2bytea() 2142 #test_get_connection() 2143 #test_exceptions() 2144 #test_ro_queries() 2145 #test_request_dsn() 2146 #test_set_encoding() 2147 #test_connection_pool() 2148 #test_list_args() 2149 #test_sanitize_pg_regex() 2150 #test_is_pg_interval() 2151 #test_sanity_check_time_skew() 2152 #test_keyword_expansion() 2153 #test_get_foreign_key_details() 2154 #test_set_user_language() 2155 #test_get_schema_revision_history() 2156 #test_run_query() 2157 test_schema_exists() 2158 2159 # ====================================================================== 2160