1 """GNUmed PostgreSQL connection handling.
2
3 TODO: iterator/generator batch fetching:
4 - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
5 - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"
6
7 winner:
8 def resultset_functional_batchgenerator(cursor, size=100):
9 for results in iter(lambda: cursor.fetchmany(size), []):
10 for rec in results:
11 yield rec
12 """

__author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'


import time
import sys
import os
import stat
import io
import codecs
import logging
import datetime as pydt
import re as regex
import threading
import hashlib
import shutil

if __name__ == '__main__':
	sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon import gmTools
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character, format_dict_like

_log = logging.getLogger('gm.db')

try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print("CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server.")
	raise


_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%(<name>)s style) placeholder support in psycopg2')
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes


_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)


_default_client_timezone = None
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

default_database = 'gnumed_v22'

postgresql_version_string = None
postgresql_version = None

__ro_conn_pool = None

auto_request_login_params = True


known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',
	6: '90e2026ac2efd236da9c8608b8685b2d',
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50',
	18: 'a0f9efcabdecfb4ddb6d8c0b69c02092',
	19: '57f009a159f55f77525cc0291e0c8b60',
	20: 'baed1901ed4c2f272b56c8cb2c6d88e8',
	21: 'e6a51a89dd22b75b61ead8f7083f251f',
	22: 'bf45f01327fb5feb2f5d3c06ba4a6792'
}

map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17,
	'a0f9efcabdecfb4ddb6d8c0b69c02092': 18,
	'57f009a159f55f77525cc0291e0c8b60': 19,
	'baed1901ed4c2f272b56c8cb2c6d88e8': 20,
	'e6a51a89dd22b75b61ead8f7083f251f': 21,
	'bf45f01327fb5feb2f5d3c06ba4a6792': 22
}

map_client_branch2required_db_version = {
	'GIT tree': 0,
	'0.3': 9,
	'0.4': 10,
	'0.5': 11,
	'0.6': 12,
	'0.7': 13,
	'0.8': 14,
	'0.9': 15,
	'1.0': 16,
	'1.1': 16,
	'1.2': 17,
	'1.3': 18,
	'1.4': 19,
	'1.5': 20,
	'1.6': 21,
	'1.7': 22,
	'1.8': 22
}

map_psyco_tx_status2str = [
	'TRANSACTION_STATUS_IDLE',
	'TRANSACTION_STATUS_ACTIVE',
	'TRANSACTION_STATUS_INTRANS',
	'TRANSACTION_STATUS_INERROR',
	'TRANSACTION_STATUS_UNKNOWN'
]

map_psyco_conn_status2str = [
	'0 - ?',
	'STATUS_READY',
	'STATUS_BEGIN_ALIAS_IN_TRANSACTION',
	'STATUS_PREPARED'
]

map_psyco_iso_level2str = {
	None: 'ISOLATION_LEVEL_DEFAULT (configured on server)',
	0: 'ISOLATION_LEVEL_AUTOCOMMIT',
	1: 'ISOLATION_LEVEL_READ_COMMITTED',
	2: 'ISOLATION_LEVEL_REPEATABLE_READ',
	3: 'ISOLATION_LEVEL_SERIALIZABLE',
	4: 'ISOLATION_LEVEL_READ_UNCOMMITTED'
}


query_table_col_defs = """select
	cols.column_name,
	cols.udt_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""

query_table_attributes = """select
	cols.column_name
from
	information_schema.columns cols
where
	cols.table_schema = %s
		and
	cols.table_name = %s
order by
	cols.ordinal_position"""


SQL_foreign_key_name = """SELECT
	fk_tbl.*,
	(SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = fk_tbl.connamespace) AS constraint_schema,
	fk_tbl.conname AS constraint_name,
	(SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = (SELECT relnamespace FROM pg_class where oid = fk_tbl.conrelid)) AS source_schema,
	(SELECT relname FROM pg_catalog.pg_class where oid = fk_tbl.conrelid) AS source_table,
	(SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = fk_tbl.conkey[1] AND attrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass) AS source_column,
	(SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = (SELECT relnamespace FROM pg_class where oid = fk_tbl.confrelid)) AS target_schema,
	(SELECT relname FROM pg_catalog.pg_class where oid = fk_tbl.confrelid) AS target_table,
	(SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = fk_tbl.confkey[1] AND attrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass) AS target_column
FROM
	pg_catalog.pg_constraint fk_tbl
WHERE
	fk_tbl.contype = 'f'
		AND
	fk_tbl.conrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass
		AND
	fk_tbl.conkey[1] = (
		SELECT
			col_tbl1.attnum
		FROM
			pg_catalog.pg_attribute col_tbl1
		WHERE
			col_tbl1.attname = %(src_col)s
				AND
			col_tbl1.attrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass
	)
		AND
	fk_tbl.confrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass
		AND
	fk_tbl.confkey[1] = (
		SELECT
			col_tbl2.attnum
		FROM
			pg_catalog.pg_attribute col_tbl2
		WHERE
			col_tbl2.attname = %(target_col)s
				AND
			col_tbl2.attrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass
	)
"""

SQL_get_index_name = """
SELECT
	(SELECT nspname FROM pg_namespace WHERE pg_namespace.oid = pg_class.relnamespace) AS index_schema,
	pg_class.relname AS index_name
FROM
	pg_class
WHERE
	pg_class.oid IN (
		SELECT
			indexrelid
		FROM
			pg_index
		WHERE
			pg_index.indrelid = %(idx_tbl)s::regclass
				AND
			pg_index.indnatts = 1	-- only one column in index
				AND
			pg_index.indkey[0] IN (
				SELECT
					pg_attribute.attnum
				FROM
					pg_attribute
				WHERE
					pg_attribute.attrelid = %(idx_tbl)s::regclass
						AND
					pg_attribute.attname = %(idx_col)s
			)
	)
"""

SQL_get_pk_col_def = """
SELECT
	pg_attribute.attname AS pk_col,
	format_type(pg_attribute.atttypid, pg_attribute.atttypmod) AS pk_type
FROM pg_index, pg_class, pg_attribute, pg_namespace
WHERE
	pg_class.oid = %(table)s::regclass
		AND
	indrelid = pg_class.oid
		AND
	-- nspname = %%(schema)s
	--	AND
	pg_class.relnamespace = pg_namespace.oid
		AND
	pg_attribute.attrelid = pg_class.oid
		AND
	pg_attribute.attnum = any(pg_index.indkey)
		AND
	indisprimary
"""


def set_default_client_encoding(encoding = None):
	# can psycopg2 handle this encoding ?
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# can Python handle this encoding ?
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise

	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, encoding))
	_default_client_encoding = encoding
	return True



def __validate_timezone(conn=None, timezone=None):

	_log.debug('validating time zone [%s]', timezone)

	cmd = 'set timezone to %(tz)s'
	args = {'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info('time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = """select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info('time zone [%s] is usable', timezone)
			is_valid = True
		except Exception:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning('time zone [%s] is not settable', timezone)
	except Exception:
		_log.error('failed to set time zone to [%s]', timezone)
		_log.exception('')

	curs.close()
	conn.rollback()

	return is_valid


def __expand_timezone(conn=None, timezone=None):
	"""Some timezone defs are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations."""
	cmd = """
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0]['name']
			_log.debug('[%s] maps to [%s]', timezone, result)
	except Exception:
		_log.exception('cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result

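
# Illustration (not from the original module): an abbreviation such as 'CEST'
# might expand to whichever matching full zone name the server returns first,
# eg. something like 'Europe/Berlin', while a timezone that is not listed as
# an abbreviation in pg_timezone_names is returned unchanged:
#
#	tz = __expand_timezone(conn = conn, timezone = 'CEST')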


def __request_login_params_tui():
	"""Text mode request of database login parameters"""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print("\nPlease enter the required login parameters:")
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = default_database)
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		gmLog2.add_word2hide(login.password)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print("user cancelled text mode login dialog")
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login


def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# has the main wx.App instance been initialized yet ?
	if wx.GetApp() is None:
		raise AssertionError(_("The wxPython GUI framework hasn't been initialized yet!"))

	# launch the login dialog
	from Gnumed.wxpython import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# user cancelled the dialog ?
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	gmLog2.add_word2hide(login.password)

	return login


def request_login_params():
	"""Request login parameters for database connection."""
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ? -> try GUI
	if 'DISPLAY' in os.environ:
		try:
			return __request_login_params_gui_wx()
		except Exception:
			pass

	# we are on the console, or the GUI login dialog failed
	return __request_login_params_tui()



def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	dsn_parts = []

	if (database is not None) and (database.strip() != ''):
		dsn_parts.append('dbname=%s' % database)

	if (host is not None) and (host.strip() != ''):
		dsn_parts.append('host=%s' % host)

	if (port is not None) and (str(port).strip() != ''):
		dsn_parts.append('port=%s' % port)

	if (user is not None) and (user.strip() != ''):
		dsn_parts.append('user=%s' % user)

	if (password is not None) and (password.strip() != ''):
		dsn_parts.append('password=%s' % password)

	dsn_parts.append('sslmode=prefer')
	dsn_parts.append('fallback_application_name=GNUmed')

	return ' '.join(dsn_parts)

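# Illustration (not from the original module) - the assembled libpq DSN looks like:
#
#	make_psycopg2_dsn(database = 'gnumed_v22', host = 'localhost', port = 5432, user = 'any-doc', password = 'sekret')
#	-> 'dbname=gnumed_v22 host=localhost port=5432 user=any-doc password=sekret sslmode=prefer fallback_application_name=GNUmed'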


def set_default_login(login=None):
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == '':
			login.host = None

	global _default_login
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = 'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', 'password=%s' % u_replacement_character, _default_dsn)
	_log.info ('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', 'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True


def __log_pgpass_state():
	# NB: the original def line was lost in extraction - name and signature reconstructed
	try:
		pgpass_file = os.path.expanduser(os.path.join('~', '.pgpass'))
		if os.path.exists(pgpass_file):
			_log.debug('standard .pgpass (%s) exists', pgpass_file)
		else:
			_log.debug('standard .pgpass (%s) not found', pgpass_file)
		pgpass_var = os.getenv('PGPASSFILE')
		if pgpass_var is None:
			_log.debug('$PGPASSFILE not set')
		else:
			if os.path.exists(pgpass_var):
				_log.debug('$PGPASSFILE=%s exists', pgpass_var)
			else:
				_log.debug('$PGPASSFILE=%s not found', pgpass_var)
	except Exception:
		_log.exception('cannot detect .pgpass and/or $PGPASSFILE')



def database_schema_compatible(link_obj=None, version=None, verbose=True):
	expected_hash = known_schema_hashes[version]
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': 'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj = link_obj).splitlines():
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj = link_obj):
				_log.debug(' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True


def get_schema_version(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return 'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']


def get_schema_structure(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select gm.concat_table_structure()'}])
	return rows[0][0]


def get_schema_hash(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select md5(gm.concat_table_structure()) as md5'}])
	return rows[0]['md5']


def get_schema_revision_history(link_obj=None):
	if table_exists(link_obj = link_obj, schema = 'gm', table = 'schema_revision'):
		cmd = """
			SELECT
				imported::text,
				version,
				filename
			FROM gm.schema_revision
			ORDER BY imported"""
	elif table_exists(link_obj = link_obj, schema = 'public', table = 'gm_schema_revision'):
		cmd = """
			SELECT
				imported::text,
				version,
				filename
			FROM public.gm_schema_revision
			ORDER BY imported"""
	else:
		return []

	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows

def get_current_user():
	rows, idx = run_ro_queries(queries = [{'cmd': 'select CURRENT_USER'}])
	return rows[0][0]


def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}
	cmd = """
SELECT
	%(schema)s AS referenced_schema,
	%(tbl)s AS referenced_table,
	%(col)s AS referenced_column,
	pgc.confkey AS referenced_column_list,
	pgc.conrelid::regclass AS referencing_table,
	pgc.conkey AS referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) AS referencing_column
FROM
	pg_constraint pgc
WHERE
	pgc.contype = 'f'
		AND
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows


def get_index_name(indexed_table=None, indexed_column=None, link_obj=None):

	args = {
		'idx_tbl': indexed_table,
		'idx_col': indexed_column
	}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': SQL_get_index_name, 'args': args}],
		get_col_idx = False
	)

	return rows


def get_foreign_key_names(src_schema=None, src_table=None, src_column=None, target_schema=None, target_table=None, target_column=None, link_obj=None):

	args = {
		'src_schema': src_schema,
		'src_tbl': src_table,
		'src_col': src_column,
		'target_schema': target_schema,
		'target_tbl': target_table,
		'target_col': target_column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': SQL_foreign_key_name, 'args': args}],
		get_col_idx = False
	)

	return rows


def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = """
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows


def schema_exists(link_obj=None, schema='gm'):
	cmd = """SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	args = {'schema': schema}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]


def table_exists(link_obj=None, schema=None, table=None):
	"""Return whether <schema>.<table> exists as a base table."""
	cmd = """
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]


def function_exists(link_obj=None, schema=None, function=None):
	cmd = """
		SELECT EXISTS (
			SELECT 1 FROM pg_proc
			WHERE proname = %(func)s AND pronamespace = (SELECT oid FROM pg_namespace WHERE nspname = %(schema)s)
		)
	"""
	args = {
		'func': function,
		'schema': schema
	}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]


def get_col_indices(cursor = None):
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	col_index = 0
	for col_desc in cursor.description:
		col_name = col_desc[0]
		# a query like "select 1, 2;" will return two columns of the same name,
		# so de-duplicate by appending the column index to later occurrences
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
		col_index += 1

	return col_indices
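
# Illustration (not from the original module): for a cursor whose last query was
#	SELECT 1 AS a, 2 AS b, 3 AS a
# get_col_indices() returns {'a': 0, 'b': 1, 'a_2': 2} - the duplicate "a" is
# disambiguated by appending its column index.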

def get_col_defs(link_obj=None, schema='public', table=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# map array types from the information schema's "_<type>" notation to "<type>[]"
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs
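
# Illustration (not from the original module): for a table t(id integer, codes text[])
# get_col_defs() returns [['id', 'codes'], {'id': 'int4', 'codes': 'text[]'}] -
# information_schema reports the array column as udt_name '_text', which the
# code above rewrites to 'text[]'.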

def get_col_names(link_obj=None, schema='public', table=None):
	"""Return column names of table."""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
	cols = []
	for row in rows:
		cols.append(row[0])
	return cols



def export_translations_from_database(filename=None):
	tx_file = io.open(filename, mode = 'wt', encoding = 'utf8')
	tx_file.write('-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write('-- - contains translations for each of [%s]\n' % ', '.join(get_translation_languages()))
	tx_file.write('-- - user database language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write('-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write('-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write('set default_transaction_read_only to off;\n\n')
	tx_file.write("set client_encoding to 'utf-8';\n\n")
	tx_file.write('\\unset ON_ERROR_STOP\n\n')

	cmd = 'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		line = "select i18n.upd_tx(E'%s', E'%s', E'%s');\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write('\n')

	tx_file.write('\\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True


def delete_translation_from_database(link_obj=None, language=None, original=None):
	cmd = 'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	args = {'lang': language, 'orig': original}
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True


def update_translation_in_database(language=None, original=None, translation=None, link_obj=None):
	if language is None:
		cmd = 'SELECT i18n.upd_tx(%(orig)s, %(trans)s)'
	else:
		cmd = 'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False, link_obj = link_obj)
	return args


def get_translation_languages():
	rows, idx = run_ro_queries (
		queries = [{'cmd': 'select distinct lang from i18n.translations'}]
	)
	return [ r[0] for r in rows ]


def get_database_translations(language=None, order_by=None):

	args = {'lang': language}
	_log.debug('language [%s]', language)

	if order_by is None:
		order_by = 'ORDER BY lang, orig'
	else:
		order_by = 'ORDER BY %s' % order_by

	if language is None:
		cmd = """
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		NULL as lang,
		ik.orig,
		NULL AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		it.lang,
		it.orig,
		it.trans
	FROM
		i18n.translations it

)) as translatable_strings
%s""" % order_by
	else:
		cmd = """
SELECT DISTINCT ON (orig, lang)
	lang, orig, trans
FROM ((

	-- strings stored as translation keys whether translated or not
	SELECT
		%%(lang)s as lang,
		ik.orig,
		i18n._(ik.orig, %%(lang)s) AS trans
	FROM
		i18n.keys ik

) UNION ALL (

	-- already translated strings
	SELECT
		%%(lang)s as lang,
		it.orig,
		i18n._(it.orig, %%(lang)s) AS trans
	FROM
		i18n.translations it

)) AS translatable_strings
%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows


def get_current_user_language():
	cmd = 'select i18n.get_curr_lang()'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]


def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': 'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': 'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': 'select True'})
	else:
		if user is None:
			queries = [{'cmd': 'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': 'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]

1074 """Set the user language in the database.
1075
1076 - regardless of whether there is any translation available.
1077 - only for the current user
1078 """
1079 _log.info('forcing database language for current db user to [%s]', language)
1080
1081 run_rw_queries(queries = [{
1082 'cmd': 'select i18n.force_curr_lang(%(lang)s)',
1083 'args': {'lang': language}
1084 }])
1085
1086

def send_maintenance_notification():
	cmd = 'notify "db_maintenance_warning"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)


def send_maintenance_shutdown():
	cmd = 'notify "db_maintenance_disconnect"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)


def is_pg_interval(candidate=None):
	cmd = 'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except Exception:
		cmd = 'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except Exception:
			return False


def lock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory(_shared).

	- locks stack upon each other and need one unlock per lock
	- same connection:
		- all locks succeed
	- different connections:
		- shared + shared succeed
		- shared + exclusive fail
	"""
	_log.debug('locking row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = """SELECT pg_try_advisory_lock('%s'::regclass::oid::int, %s)""" % (table, pk)
	else:
		cmd = """SELECT pg_try_advisory_lock_shared('%s'::regclass::oid::int, %s)""" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot lock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False


def unlock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory_unlock(_shared).

	- each lock needs one unlock
	"""
	_log.debug('trying to unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = "SELECT pg_advisory_unlock('%s'::regclass::oid::int, %s)" % (table, pk)
	else:
		cmd = "SELECT pg_advisory_unlock_shared('%s'::regclass::oid::int, %s)" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False

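# Illustration (not from the original module): advisory locks stack, so every
# successful lock_row() needs a matching unlock_row() on the same connection:
#
#	if lock_row(table = 'clin.episode', pk = 12, exclusive = True):
#		try:
#			...modify the row...
#		finally:
#			unlock_row(table = 'clin.episode', pk = 12, exclusive = True)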

def row_is_locked(table=None, pk=None):
	"""Looks at pg_locks.

	- does not take into account locks other than 'advisory', however
	"""
	cmd = """SELECT EXISTS (
		SELECT 1 FROM pg_locks WHERE
			classid = '%s'::regclass::oid::int
				AND
			objid = %s
				AND
			locktype = 'advisory'
	)""" % (table, pk)
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		_log.debug('row is locked: [%s] [%s]', table, pk)
		return True
	_log.debug('row is NOT locked: [%s] [%s]', table, pk)
	return False


def __store_file_in_cache(filename, cache_key_data):
	md5 = hashlib.md5()
	md5.update(('%s' % cache_key_data).encode('utf8'))
	md5_sum = md5.hexdigest()
	cached_name = os.path.join(gmTools.gmPaths().bytea_cache_dir, md5_sum)
	_log.debug('caching [%s] as [%s]', filename, cached_name)
	gmTools.remove_file(cached_name, log_error = True, force = True)
	try:
		shutil.copyfile(filename, cached_name, follow_symlinks = True)
	except shutil.SameFileError:
		pass
	except OSError:
		_log.exception('cannot copy file into cache: [%s] -> [%s]', filename, cached_name)
		return None
	PERMS_cache_file = 0o0660		# rw for owner and group
	try:
		os.chmod(cached_name, PERMS_cache_file)
	except PermissionError:
		_log.exception('cannot set cache file [%s] permissions to [%s]', cached_name, stat.filemode(PERMS_cache_file))
		return None
	return cached_name


def __get_filename_in_cache(cache_key_data=None, data_size=None):
	md5 = hashlib.md5()
	md5.update(('%s' % cache_key_data).encode('utf8'))
	md5_sum = md5.hexdigest()
	cached_name = os.path.join(gmTools.gmPaths().bytea_cache_dir, md5_sum)
	_log.debug('[%s]: %s', md5_sum, cached_name)
	try:
		stat_result = os.stat(cached_name)		# named so as not to shadow the <stat> module
	except FileNotFoundError:
		return None
	_log.debug('cache hit: %s [%s]', cached_name, stat_result)
	if os.path.islink(cached_name) or (not os.path.isfile(cached_name)):
		_log.error('object in cache is not a regular file: %s', cached_name)
		_log.error('possibly an attack, removing')
		removed = gmTools.remove_file(cached_name, log_error = True)
		if removed:
			return None
		raise Exception('cannot delete suspicious object in cache dir: %s' % cached_name)
	if stat_result.st_size == data_size:
		return cached_name
	_log.debug('size in cache [%s] <> expected size [%s], removing cached file', stat_result.st_size, data_size)
	removed = gmTools.remove_file(cached_name, log_error = True)
	if removed:
		return None
	raise Exception('cannot remove suspicious object from cache dir: %s' % cached_name)


def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None, conn=None):

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	cache_key_data = '%s' % data_query
	cached_filename = __get_filename_in_cache(cache_key_data = cache_key_data, data_size = data_size)
	if cached_filename is not None:
		gmTools.mklink(cached_filename, filename, overwrite = False)
		return True

	outfile = io.open(filename, 'wb')
	result = bytea2file_object (
		data_query = data_query,
		file_obj = outfile,
		chunk_size = chunk_size,
		data_size = data_size,
		data_size_query = data_size_query,
		conn = conn
	)
	outfile.close()
	__store_file_in_cache(filename, cache_key_data)

	return result


def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None, conn=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be a string containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file-like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# If the client sets a client encoding other than the default we
	# may receive encoding-processed data which is not the raw binary
	# content we want, so get a dedicated connection if none was
	# passed in (read-only suffices since we only SELECT chunks).
	if conn is None:
		conn = get_raw_connection(readonly = True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	max_chunk_size = 1024 * 1024 * 20		# 20 MB
	if chunk_size == 0:
		chunk_size = min(data_size, max_chunk_size)

	_log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)

	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)

	# retrieve chunks, skipped if data size < chunk size,
	# does this not carry the danger of cutting up multi-byte escape sequences ?
	# no, since bytea is binary and substring() counts bytes
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except Exception:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		file_obj.write(rows[0][0])

	# retrieve remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except Exception:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		file_obj.write(rows[0][0])

	conn.rollback()
	return True


def file2bytea(query=None, filename=None, args=None, conn=None, file_md5=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec " <field> = %(data)s::bytea"

	The query CAN return the MD5 of the inserted data:
		RETURNING md5(<field>) AS md5
	in which case it will be compared to the md5
	of the file.
	"""
	# read data from file
	infile = open(filename, "rb")
	data_as_byte_string = infile.read()
	infile.close()
	if args is None:
		args = {}
	args['data'] = memoryview(data_as_byte_string)
	del data_as_byte_string

	# insert the data
	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = True
	else:
		close_conn = False

	rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = (file_md5 is not None))

	success_status = True
	if file_md5 is None:
		conn.commit()
	else:
		db_md5 = rows[0]['md5']
		if file_md5 != db_md5:
			conn.rollback()
			success_status = False
			_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
		else:
			conn.commit()
			_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)

	if close_conn:
		conn.close()

	return success_status


def file2lo(filename=None, conn=None, file_md5=None):
	# NB: the parameter was <check_md5=False> in the damaged source, but the body
	# compares against an MD5 string, so <file_md5=None> is assumed here
	file_size = os.path.getsize(filename)
	if file_size > (1024 * 1024) * 1024:
		_log.debug('file size of [%s] > 1 GB, supposedly not supported by psycopg2 large objects (but seems to work anyway ?)', file_size)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop
	_log.debug('[%s] -> large object', filename)

	# insert the data
	lo = conn.lobject(0, 'w', 0, filename)
	lo_oid = lo.oid
	lo.close()
	_log.debug('large object OID: %s', lo_oid)

	# verify
	if file_md5 is None:
		conn.commit()
		close_conn()
		return lo_oid
	cmd = 'SELECT md5(lo_get(%(loid)s::oid))'
	args = {'loid': lo_oid}
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': cmd, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database large object match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return lo_oid
	conn.rollback()
	close_conn()
	_log.error('MD5 sums of data file and database large object [%s] do not match: [file::%s] <> [DB::%s]', lo_oid, file_md5, db_md5)
	return -1


def file2bytea_lo(filename=None, conn=None, file_md5=None):
	# NB: def line reconstructed - name assumed from GNUmed's lo-based storage twin of file2lo()
	file_size = os.path.getsize(filename)
	if file_size > (1024 * 1024) * 1024:
		_log.debug('file size of [%s] > 1 GB, supposedly not supported by psycopg2 large objects (but seems to work anyway ?)', file_size)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop
	_log.debug('[%s] -> large object', filename)

	# insert the data
	lo = conn.lobject(0, 'w', 0, filename)
	lo_oid = lo.oid
	lo.close()
	_log.debug('large object OID: %s', lo_oid)

	# verify
	if file_md5 is None:
		conn.commit()
		close_conn()
		return lo_oid
	cmd = 'SELECT md5(lo_get(%(loid)s::oid))'
	args = {'loid': lo_oid}
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': cmd, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database large object match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return lo_oid
	conn.rollback()
	close_conn()
	_log.error('MD5 sums of data file and database large object [%s] do not match: [file::%s] <> [DB::%s]', lo_oid, file_md5, db_md5)
	return -1


def file2bytea_copy_from(table=None, columns=None, filename=None, conn=None, md5_query=None, file_md5=None):

	chunk_size = 32 * (1024 * 1024)
	_log.debug('[%s] (%s bytes) --(%s bytes)-> %s(%s)', filename, os.path.getsize(filename), chunk_size, table, columns)
	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close		# was a bare boolean in the damaged source but is called below
	else:
		close_conn = __noop
	curs = conn.cursor()

	infile = open(filename, "rb")
	curs.copy_from(infile, table, size = chunk_size, columns = columns)
	infile.close()
	curs.close()
	if None in [file_md5, md5_query]:
		conn.commit()
		close_conn()
		return True

	# verify
	rows, idx = run_ro_queries(link_obj = conn, queries = [md5_query])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return True
	close_conn()
	_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
	return False


def file2bytea_overlay(query=None, args=None, filename=None, conn=None, md5_query=None, file_md5=None):
	"""Store data from a file into a bytea field.

	The query must:
	- 'cmd' must be in unicode
	- 'cmd' must contain a format spec identifying the row (eg
	  a primary key) matching <args> if it is an UPDATE
	- 'cmd' must contain "... SET ... <some_bytea_field> = OVERLAY(some_bytea_field PLACING %(data)s::bytea FROM %(start)s FOR %(size)s) ..."
	- 'args' must be a dict matching 'cmd'

	The query CAN return the MD5 of the inserted data:
		RETURNING md5(<field>) AS md5
	in which case it will be compared to the md5
	of the file.

	UPDATE
		the_table
	SET
		bytea_field = OVERLAY (
			coalesce(bytea_field, ''::bytea),
			PLACING
			%(data)s::bytea
			FROM
			%(start)s
			FOR
			%(size)s
		)
	WHERE
		primary_key = pk_value

	SELECT md5(bytea_field) FROM the_table WHERE primary_key = pk_value
	"""
	chunk_size = 32 * (1024 * 1024)
	file_size = os.path.getsize(filename)
	if file_size <= chunk_size:
		chunk_size = file_size
	needed_chunks, remainder = divmod(file_size, chunk_size)
	_log.debug('file data: %s bytes, chunks: %s, chunk size: %s bytes, remainder: %s bytes', file_size, needed_chunks, chunk_size, remainder)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop

	infile = open(filename, "rb")
	# write chunks
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		args['start'] = chunk_start
		args['size'] = chunk_size
		data_as_byte_string = infile.read(chunk_size)
		args['data'] = memoryview(data_as_byte_string)
		del data_as_byte_string
		try:
			rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = False)
		except Exception:
			_log.exception('cannot write chunk [%s/%s] of size [%s], try decreasing chunk size', chunk_id+1, needed_chunks, chunk_size)
			conn.rollback()
			close_conn()
			infile.close()
			raise

	# write remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		args['start'] = chunk_start
		args['size'] = remainder
		data_as_byte_string = infile.read(remainder)
		args['data'] = memoryview(data_as_byte_string)
		del data_as_byte_string
		try:
			rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = False)
		except Exception:
			_log.error('cannot write remaining [%s] bytes', remainder)
			conn.rollback()
			close_conn()
			infile.close()
			raise
	infile.close()
	if None in [file_md5, md5_query]:
		conn.commit()
		close_conn()
		return True

	# verify
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': md5_query, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return True
	close_conn()
	_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
	return False


def run_sql_script(sql_script, conn=None):

	if conn is None:
		conn = get_connection(readonly = False)

	from Gnumed.pycommon import gmPsql
	psql = gmPsql.Psql(conn)

	if psql.run(sql_script) == 0:
		query = {
			'cmd': 'select gm.log_script_insertion(%(name)s, %(ver)s)',
			'args': {'name': sql_script, 'ver': 'current'}
		}
		run_rw_queries(link_obj = conn, queries = [query])
		conn.commit()
		return True

	_log.error('error running sql script: %s', sql_script)
	return False


def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
	- True: try to escape *all* metacharacters
	- False: only escape those which render the regex invalid
	"""
	return expression.replace (
		'(', '\\('
	).replace (
		')', '\\)'
	).replace (
		'[', '\\['
	).replace (
		'+', '\\+'
	).replace (
		'.', '\\.'
	).replace (
		'*', '\\*'
	)

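# Illustration (not from the original module):
#	sanitize_pg_regex(expression = 'chol. (mg/dl)') -> 'chol\\. \\(mg/dl\\)'
# which can then safely be fed into a ~ / ~* match.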


def capture_conn_state(conn=None):

	tx_status = conn.get_transaction_status()
	if tx_status in [ psycopg2.extensions.TRANSACTION_STATUS_INERROR, psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN ]:
		isolation_level = '%s (tx aborted or unknown, cannot retrieve)' % conn.isolation_level
	else:
		isolation_level = '%s (%s)' % (conn.isolation_level, map_psyco_iso_level2str[conn.isolation_level])
	conn_status = '%s (%s)' % (conn.status, map_psyco_conn_status2str[conn.status])
	if conn.closed != 0:
		conn_status = 'undefined (%s)' % conn_status
	try:
		conn_deferrable = conn.deferrable
	except AttributeError:
		conn_deferrable = 'unavailable'

	d = {
		'identity': id(conn),
		'backend PID': conn.get_backend_pid(),
		'protocol version': conn.protocol_version,
		'encoding': conn.encoding,
		'closed': conn.closed,
		'readonly': conn.readonly,
		'autocommit': conn.autocommit,
		'isolation level (psyco)': isolation_level,
		'async': conn.async_,
		'deferrable': conn_deferrable,
		'transaction status': '%s (%s)' % (tx_status, map_psyco_tx_status2str[tx_status]),
		'connection status': conn_status,
		'executing async op': conn.isexecuting(),
		'type': type(conn)
	}
	return '%s\n' % conn + format_dict_like (
		d,
		relevant_keys = [
			'type',
			'identity',
			'backend PID',
			'protocol version',
			'encoding',
			'isolation level (psyco)',
			'readonly',
			'autocommit',
			'closed',
			'connection status',
			'transaction status',
			'deferrable',
			'async',
			'executing async op'
		],
		tabular = True,
		value_delimiters = None
	)


def capture_cursor_state(cursor=None):
	conn = cursor.connection

	tx_status = conn.get_transaction_status()
	if tx_status in [ psycopg2.extensions.TRANSACTION_STATUS_INERROR, psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN ]:
		isolation_level = 'tx aborted or unknown, cannot retrieve'
	else:
		isolation_level = conn.isolation_level
	try:
		conn_deferrable = conn.deferrable
	except AttributeError:
		conn_deferrable = 'unavailable'

	if cursor.query is None:
		query = '<no query>'
	else:
		query = cursor.query

	txt = """Link state:
Cursor
  identity: %s; name: %s
  closed: %s; scrollable: %s; with hold: %s; arraysize: %s; itersize: %s;
  last rowcount: %s; rownumber: %s; lastrowid (OID): %s;
  last description: %s
  statusmessage: %s
Connection
  identity: %s; backend pid: %s; protocol version: %s;
  closed: %s; autocommit: %s; isolation level: %s; encoding: %s; async: %s; deferrable: %s; readonly: %s;
  TX status: %s; CX status: %s; executing async op: %s;
Query
  %s
""" % (
		id(cursor),
		cursor.name,
		cursor.closed,
		cursor.scrollable,
		cursor.withhold,
		cursor.arraysize,
		cursor.itersize,
		cursor.rowcount,
		cursor.rownumber,
		cursor.lastrowid,
		cursor.description,
		cursor.statusmessage,

		id(conn),
		conn.get_backend_pid(),
		conn.protocol_version,
		conn.closed,
		conn.autocommit,
		isolation_level,
		conn.encoding,
		conn.async_,
		conn_deferrable,
		conn.readonly,
		map_psyco_tx_status2str[tx_status],
		map_psyco_conn_status2str[conn.status],
		conn.isexecuting(),

		query
	)
	return txt


def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = lambda :1
		tx_rollback = lambda :1
		readonly_rollback_just_in_case = lambda :1
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
		if link_obj.autocommit is True:		# readonly connection ?
			readonly_rollback_just_in_case = link_obj.rollback
		else:
			# do not rollback readonly queries on passed-in readwrite
			# connections just-in-case, because they may already have
			# seen fully legitimate write action which would get lost
			readonly_rollback_just_in_case = lambda :1
	elif link_obj is None:
		conn = get_connection(readonly=True, verbose=verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
		readonly_rollback_just_in_case = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug(capture_cursor_state(curs))
		except dbapi.Error as pg_exc:
			_log.error('query failed in RO connection')
			_log.error(capture_cursor_state(curs))
			if hasattr(pg_exc, 'diag'):
				for prop in dir(pg_exc.diag):
					if prop.startswith('__'):
						continue
					val = getattr(pg_exc.diag, prop)
					if val is None:
						continue
					_log.error('PG diags %s: %s', prop, val)
			pg_exc = make_pg_exception_fields_unicode(pg_exc)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.u_pgerror)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = 'Query: [%s]' % curs.query.strip().strip('\n').strip().strip('\n')
				if curs.statusmessage != '':
					details = 'Status: %s\n%s' % (
						curs.statusmessage.strip().strip('\n').strip().strip('\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = '[%s]' % pg_exc.pgcode
				else:
					msg = '[%s]: %s' % (pg_exc.pgcode, pg_exc.u_pgerror)
				raise gmExceptions.AccessDenied (
					msg,
					source = 'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except Exception:
			_log.exception('query failed in RO connection')
			_log.error(capture_cursor_state(curs))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', curs.description)
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	# so we can see data committed meanwhile if the link object
	# had been passed in and thusly might be part of a
	# long-running read-only transaction
	readonly_rollback_just_in_case()
	return (data, col_idx)


def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
1936 if isinstance(link_obj, dbapi._psycopg.cursor):
1937 conn_close = lambda :1
1938 conn_commit = lambda :1
1939 tx_rollback = lambda :1
1940 curs = link_obj
1941 curs_close = lambda :1
1942 notices_accessor = curs.connection
1943 elif isinstance(link_obj, dbapi._psycopg.connection):
1944 conn_close = lambda :1
1945 if end_tx:
1946 conn_commit = link_obj.commit
1947 tx_rollback = link_obj.rollback
1948 else:
1949 conn_commit = lambda :1
1950 tx_rollback = lambda :1
1951 curs = link_obj.cursor()
1952 curs_close = curs.close
1953 notices_accessor = link_obj
1954 elif link_obj is None:
1955 conn = get_connection(readonly=False)
1956 conn_close = conn.close
1957 conn_commit = conn.commit
1958 tx_rollback = conn.rollback
1959 curs = conn.cursor()
1960 curs_close = curs.close
1961 notices_accessor = conn
1962 else:
1963 raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)
1964
	for query in queries:
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug(capture_cursor_state(curs))
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/'))
			del notices_accessor.notices[:]

		except dbapi.Error as pg_exc:
			_log.error('query failed in RW connection')
			_log.error(capture_cursor_state(curs))
			if hasattr(pg_exc, 'diag'):
				for prop in dir(pg_exc.diag):
					if prop.startswith('__'):
						continue
					val = getattr(pg_exc.diag, prop)
					if val is None:
						continue
					_log.error('PG diags %s: %s', prop, val)
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/'))
			del notices_accessor.notices[:]
			pg_exc = make_pg_exception_fields_unicode(pg_exc)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.u_pgerror)

			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = 'Query: [%s]' % curs.query.strip()
				if curs.statusmessage != '':
					details = 'Status: %s\n%s' % (
						curs.statusmessage.strip(),
						details
					)
				if pg_exc.pgerror is None:
					msg = '[%s]' % pg_exc.pgcode
				else:
					msg = '[%s]: %s' % (pg_exc.pgcode, pg_exc.u_pgerror)
				try:
					curs_close()
					tx_rollback()
					conn_close()
				except dbapi.InterfaceError:
					_log.exception('cannot cleanup')
				raise gmExceptions.AccessDenied (
					msg,
					source = 'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)

			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			raise

		except Exception:
			_log.exception('error running query in RW connection')
			_log.error(capture_cursor_state(curs))
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/'))
			del notices_accessor.notices[:]
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except Exception:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)

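# Usage sketch for run_rw_queries() (illustration only; the scratch table
# name is made up): run two statements in one transaction and read back
# the generated key via RETURNING.
def __example__run_rw_queries():
	rows, idx = run_rw_queries (
		queries = [
			{'cmd': 'CREATE TABLE IF NOT EXISTS tmp_demo (pk serial PRIMARY KEY, val text)'},
			{'cmd': 'INSERT INTO tmp_demo (val) VALUES (%(val)s) RETURNING pk', 'args': {'val': 'demo'}}
		],
		return_data = True,
		get_col_idx = True
	)
	# idx maps column names to positions because get_col_idx was True
	print('new pk:', rows[0][idx['pk']])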

def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates and runs SQL for an INSERT query.

	values: dict of values keyed by field name to insert them into
	"""
	if schema is None:
		schema = 'public'

	fields = list(values.keys())
	val_snippets = []
	for field in fields:
		val_snippets.append('%%(%s)s' % field)

	if returning is None:
		returning = ''
		return_data = False
	else:
		returning = '\n\tRETURNING\n\t\t%s' % ', '.join(returning)
		return_data = True

	cmd = """\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		',\n\t\t'.join(fields),
		',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug('running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)

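# Usage sketch for run_insert(), mirroring the gm.access_log helper further
# below; the RETURNING column name 'pk' is assumed for illustration.
def __example__run_insert():
	rows, idx = run_insert (
		schema = 'gm',
		table = 'access_log',
		values = {'user_action': 'demo entry'},
		returning = ['pk'],		# assumed column name
		end_tx = True
	)
	print('inserted row pk:', rows[0][0])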

class cConnectionPool(psycopg2.pool.ThreadedConnectionPool):
	"""GNUmed database connection pool.

	Extends psycopg2's ThreadedConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN."""

	def _connect(self, key=None):
		_log.debug('conn request with key [%s]', key)
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly = True)
		# pooled connections are read-only: make close() raise
		# rather than really closing the connection
		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close
		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)
		return conn

	def discard_pooled_connection(self, key=None):
		# (method name assumed - the original def line is lost; this
		# discards the pooled connection of the given, or current, thread)
		if key is None:
			key = threading.current_thread().ident
		try:
			conn = self._used[key]
		except KeyError:
			_log.error('no such key in connection pool: %s', key)
			_log.debug('available keys: %s', self._used.keys())
			return
		del self._used[key]
		del self._rused[id(conn)]
		conn.original_close()

	def shutdown(self):
		# (method name assumed - the original def line is lost)
		for conn_key in self._used.keys():
			conn = self._used[conn_key]
			if conn.closed != 0:
				continue
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, conn.get_backend_pid())
			conn.original_close()

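# Usage sketch (illustration only; assumes a default DSN is configured):
# ThreadedConnectionPool keys connections by thread ident by default, so
# each thread transparently reuses its own read-only connection.
def __example__connection_pool():
	pool = cConnectionPool(minconn = 1, maxconn = 2, dsn = None, verbose = False)
	conn = pool.getconn()		# keyed by the calling thread
	curs = conn.cursor()
	curs.execute('SELECT 1')
	curs.close()
	pool.putconn(conn)
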

def get_raw_connection(dsn=None, verbose=False, readonly=True, connection_name=None, autocommit=False):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections,
	  say, for verifying encodings etc
	"""
	if dsn is None:
		dsn = get_default_dsn()

	if 'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	if ' client_encoding=' not in dsn:
		dsn += ' client_encoding=utf8'

	if ' application_name' not in dsn:
		if connection_name is None:
			dsn += " application_name=GNUmed-[%s]" % threading.current_thread().name.replace(' ', '_')
		else:
			dsn += " application_name=%s" % connection_name

	try:
		conn = dbapi.connect(dsn = dsn, connection_factory = psycopg2.extras.DictConnection)
	except dbapi.OperationalError as e:
		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise
		if 'fe_sendauth' in msg:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		# also catch localized variants of 'user/role ... does not exist'
		# (say, German 'Rolle ... existiert nicht')
		if (
			((regex.search('user ".*"', msg) is not None) or (regex.search('(R|r)ol{1,2}e', msg) is not None))
			and ('exist' in msg)
			and (regex.search('n(o|ich)t', msg) is not None)
		):
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		if 'uthenti' in msg:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		raise

	if connection_name is None:
		_log.debug('established anonymous database connection, backend PID: %s', conn.get_backend_pid())
	else:
		_log.debug('established database connection "%s", backend PID: %s', connection_name, conn.get_backend_pid())

	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except BaseException:
			_log.exception('cannot get database size')
		finally:
			curs.close()
			conn.commit()

	if verbose:
		curs = conn.cursor()
		_log_PG_settings(curs = curs)
		curs.close()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	if readonly:
		_log.debug('readonly: forcing autocommit=True to avoid <IDLE IN TRANSACTION>')
		autocommit = True
	else:
		_log.debug('autocommit is desired to be: %s', autocommit)

	conn.commit()
	conn.autocommit = autocommit
	conn.readonly = readonly

	if verbose:
		_log.debug('enabling <plpgsql.extra_warnings/_errors>')
		curs = conn.cursor()
		try:
			curs.execute("SET plpgsql.extra_warnings TO 'all'")
			curs.execute("SET plpgsql.extra_errors TO 'all'")
		except BaseException:
			_log.exception('cannot enable <plpgsql.extra_warnings/_errors>')
		finally:
			curs.close()
			conn.commit()

	conn.is_decorated = False
	return conn

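# Usage sketch for get_raw_connection() (assumes a default DSN has been
# configured): probe the server before full connection setup has run.
def __example__get_raw_connection():
	conn = get_raw_connection(readonly = True, connection_name = 'demo-probe')
	curs = conn.cursor()
	curs.execute('SHOW server_version')
	print('server version:', curs.fetchone()[0])
	curs.close()
	conn.close()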

def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True, connection_name=None, autocommit=False):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			log_ro_conn = True
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		else:
			log_ro_conn = False
		try:
			conn = __ro_conn_pool.getconn()
		except psycopg2.pool.PoolError:
			_log.exception('falling back to non-pooled connection')
			conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = readonly, connection_name = connection_name, autocommit = autocommit)
			log_ro_conn = True
		if log_ro_conn:
			for line in capture_conn_state(conn = conn).split('\n'):
				_log.debug(line)
	else:
		conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = readonly, connection_name = connection_name, autocommit = autocommit)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	try:
		conn.set_client_encoding(encoding)
	except dbapi.DataError:
		t, v, tb = sys.exc_info()
		if 'cannot set encoding to' in str(v):
			raise cEncodingError(encoding, v).with_traceback(tb)
		if 'invalid value for parameter "client_encoding"' in str(v):
			raise cEncodingError(encoding, v).with_traceback(tb)
		raise

	if not readonly:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)

	_log.debug('client time zone [%s]', _default_client_timezone)

	curs = conn.cursor()
	curs.execute(_sql_set_timezone, [_default_client_timezone])
	curs.close()
	conn.commit()

	conn.is_decorated = True

	if verbose:
		for line in capture_conn_state(conn = conn).split('\n'):
			_log.debug(line)

	return conn

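# Usage sketch for get_connection() (assumes default DSN/login have been
# set up): fetch one row over a pooled read-only connection.
def __example__get_connection():
	conn = get_connection(readonly = True)
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': 'SELECT version()'}],
		get_col_idx = True
	)
	print(rows[0][idx['version']])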

def _raise_exception_on_ro_conn_close():
	raise TypeError('close() called on read-only connection')

def log_database_access(action=None):
	# (function name assumed - the original def line is lost)
	run_insert (
		schema = 'gm',
		table = 'access_log',
		values = {'user_action': action},
		end_tx = True
	)


def sanity_check_time_skew(tolerance=60):
	"""Check that server time and client time agree to within
	the given tolerance of each other.

	tolerance: seconds (default value assumed - the original def line is lost)
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	cmd = "SELECT now() at time zone 'UTC'"
	conn = get_raw_connection(readonly = True)
	curs = conn.cursor()

	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	if server_now_as_utc > client_now_as_utc:
		real_skew = server_now_as_utc - client_now_as_utc
	else:
		real_skew = client_now_as_utc - server_now_as_utc

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True

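# Usage sketch:
def __example__sanity_check_time_skew():
	if not sanity_check_time_skew(tolerance = 10):
		print('client and server clocks disagree by more than 10 seconds (or the wire is too slow to tell)')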

def sanity_check_database_settings():
	"""Check database settings for sanity.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')

	conn = get_connection()

	global postgresql_version_string
	if postgresql_version_string is None:
		curs = conn.cursor()
		curs.execute('SELECT version()')
		postgresql_version_string = curs.fetchone()['version']
		curs.close()
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	# setting name -> [[acceptable values], risk when wrong, fatal (True/False/None=ignore)]
	options2check = {
		'allow_system_table_mods': [['off'], 'system breakage', False],
		'check_function_bodies': [['on'], 'suboptimal error detection', False],
		'datestyle': [['ISO'], 'faulty timestamp parsing', True],
		'default_transaction_isolation': [['read committed'], 'faulty database reads', True],
		'default_transaction_read_only': [['on'], 'accidental database writes', False],
		'fsync': [['on'], 'data loss/corruption', True],
		'full_page_writes': [['on'], 'data loss/corruption', False],
		'lc_messages': [['C'], 'suboptimal error detection', False],
		'password_encryption': [['on', 'md5', 'scram-sha-256'], 'breach of confidentiality', False],
		'synchronous_commit': [['on'], 'data loss/corruption', False],
		'sql_inheritance': [['on'], 'query breakage, data loss/corruption', True],
		'ignore_checksum_failure': [['off'], 'data loss/corruption', False],
		'track_commit_timestamp': [['on'], 'suboptimal auditing', False]
	}

	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = 'hipaa'):
		options2check['log_connections'] = [['on'], 'non-compliance with HIPAA', True]
		options2check['log_disconnections'] = [['on'], 'non-compliance with HIPAA', True]
	else:
		options2check['log_connections'] = [['on'], 'non-compliance with HIPAA', None]
		options2check['log_disconnections'] = [['on'], 'non-compliance with HIPAA', None]

	cmd = "SELECT name, setting from pg_settings where name in %(settings)s"
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
		get_col_idx = False
	)

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		option = row['name']
		value_found = row['setting']
		values_expected = options2check[option][0]
		risk = options2check[option][1]
		fatal_setting = options2check[option][2]
		if value_found not in values_expected:
			if fatal_setting is True:
				found_error = True
			elif fatal_setting is False:
				found_problem = True
			elif fatal_setting is None:
				pass
			else:
				_log.error(options2check[option])
				raise ValueError('invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (option, value_found))
			msg.append(_(' risk: %s') % risk)
			_log.warning('PG option [%s] set to [%s], expected %s, risk: <%s>' % (option, value_found, values_expected, risk))

	if found_error:
		return 2, '\n'.join(msg)

	if found_problem:
		return 1, '\n'.join(msg)

	return 0, ''

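# Usage sketch:
def __example__sanity_check_database_settings():
	status, message = sanity_check_database_settings()
	if status == 2:
		print('FATAL database misconfiguration:\n%s' % message)
	elif status == 1:
		print('database configuration warnings:\n%s' % message)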

def _log_PG_settings(curs=None):
	try:
		curs.execute('SELECT name, setting, unit, source, reset_val, sourcefile, sourceline FROM pg_settings')
	except Exception:
		_log.exception('cannot log PG settings ("SELECT ... FROM pg_settings" failed)')
		return False
	settings = curs.fetchall()
	for setting in settings:
		if setting['unit'] is None:
			unit = ''
		else:
			unit = ' %s' % setting['unit']
		if setting['sourcefile'] is None:
			sfile = ''
		else:
			sfile = '// %s @ %s' % (setting['sourcefile'], setting['sourceline'])
		_log.debug('%s: %s%s (set from: [%s] // sess RESET will set to: [%s]%s)',
			setting['name'],
			setting['setting'],
			unit,
			setting['source'],
			setting['reset_val'],
			sfile
		)

	try:
		curs.execute('select pg_available_extensions()')
	except Exception:
		_log.exception('cannot log available PG extensions')
		return False
	extensions = curs.fetchall()
	if not extensions:
		_log.error('no PG extensions available')
		return False
	for ext in extensions:
		_log.debug('PG extension: %s', ext['pg_available_extensions'])

	return True


def make_pg_exception_fields_unicode(exc):
	if not isinstance(exc, dbapi.Error):
		return exc

	if exc.pgerror is None:
		try:
			msg = exc.args[0]
		except (AttributeError, IndexError, TypeError):
			return exc
		exc.u_pgerror = msg
		return exc

	exc.u_pgerror = exc.pgerror.strip()
	return exc

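# Usage sketch: normalize a psycopg2 error before logging it.
def __example__pg_error_fields():
	try:
		run_ro_queries(queries = [{'cmd': 'SELECT 1/0'}])
	except dbapi.Error as exc:
		exc = make_pg_exception_fields_unicode(exc)
		print('PG error code:', exc.pgcode)
		print('PG error message:', exc.u_pgerror)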

def extract_msg_from_pg_exception(exc=None):
	# (function name assumed - the original def line is lost)
	try:
		return '%s' % exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return 'cannot extract message from exception'


class cAuthenticationError(dbapi.OperationalError):
	# (class header reconstructed - the original line is lost)

	def __init__(self, dsn=None, prev_val=None):
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		return 'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)


class cEncodingError(dbapi.OperationalError):
	# (class header reconstructed - the original line is lost)

	def __init__(self, encoding=None, prev_val=None):
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)


class cAdapterPyDateTime(object):
	# (class and getquoted() headers reconstructed - the original lines are
	# lost; this is the adapter registered for datetime.datetime below)

	def __init__(self, dt):
		if dt.tzinfo is None:
			raise ValueError('datetime.datetime instance is lacking a time zone: [%s]' % (_timestamp_template % dt.isoformat()))
		self.__dt = dt

	def getquoted(self):
		# adapter protocol: return the SQL literal, as bytes under Python 3
		return (_timestamp_template % self.__dt.isoformat()).encode('utf8')

# tell psycopg2 how to represent text / text array columns
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt timezone-aware datetime instances
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

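# Sketch of what the adapter produces (illustration only; relies on the
# reconstructed getquoted() above):
def __example__adapt_datetime():
	ts = pydt.datetime(2020, 1, 1, 12, 0, tzinfo = FixedOffsetTimezone(60))
	print(cAdapterPyDateTime(ts).getquoted())
	# -> b"cast('2020-01-01T12:00:00+01:00' as timestamp with time zone)"
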
if __name__ == "__main__":

	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	from Gnumed.pycommon.gmTools import file2md5

	logging.basicConfig(level = logging.DEBUG)

	# (the test function names below are reconstructed from the
	# functions they exercise - the original def lines are lost)
	def test_file2bytea():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (data bytea)'}
		])

		try:
			file2bytea(query = 'insert into test_bytea values (%(data)s::bytea) returning md5(data) as md5', filename = sys.argv[2], file_md5 = file2md5(sys.argv[2], True))
		except Exception:
			_log.exception('error')

		run_rw_queries(queries = [
			{'cmd': 'drop table test_bytea'}
		])

	def test_file2bytea_copy_from():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (pk serial primary key, data bytea)'},
			{'cmd': "insert into test_bytea (data) values (NULL::bytea)"}
		])

		md5_query = {
			'cmd': 'select md5(data) AS md5 FROM test_bytea WHERE pk = %(pk)s',
			'args': {'pk': 1}
		}

		file2bytea_copy_from (
			table = 'test_bytea',
			columns = ['data'],
			filename = sys.argv[2],
			md5_query = md5_query,
			file_md5 = file2md5(sys.argv[2], True)
		)

		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'}
		])

	def test_file2bytea_overlay():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (pk serial primary key, data bytea)'},
			{'cmd': "insert into test_bytea (data) values (NULL::bytea)"}
		])

		cmd = """
			update test_bytea
			set data = overlay (
				coalesce(data, ''::bytea)
				placing %(data)s::bytea
				from %(start)s
				for %(size)s
			)
			where
				pk > %(pk)s
		"""
		md5_cmd = 'select md5(data) from test_bytea'
		args = {'pk': 0}
		file2bytea_overlay (
			query = cmd,
			args = args,
			filename = sys.argv[2],
			conn = None,
			md5_query = md5_cmd,
			file_md5 = file2md5(sys.argv[2], True)
		)

		run_rw_queries(queries = [
			{'cmd': 'drop table test_bytea'}
		])

	def test_get_connection():
		print("testing get_connection()")

		dsn = 'foo'
		try:
			conn = get_connection(dsn = dsn)
		except dbapi.ProgrammingError:
			print("1) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("2) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9 user=abc'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("3) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v22 user=any-doc'
		try:
			conn = get_connection(dsn = dsn)
			print("4) SUCCESS:", dsn)
		except cAuthenticationError:
			print("4) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("5) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		conn = get_connection(dsn = dsn, readonly = True)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		conn = get_connection(dsn = dsn, readonly = False, verbose = True)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		encoding = 'foo'
		try:
			conn = get_connection(dsn = dsn, encoding = encoding)
		except cEncodingError:
			print("6) SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding))
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

2833 print("testing exceptions")
2834
2835 try:
2836 raise cAuthenticationError('no dsn', 'no previous exception')
2837 except cAuthenticationError:
2838 t, v, tb = sys.exc_info()
2839 print(t)
2840 print(v)
2841 print(tb)
2842
2843 try:
2844 raise cEncodingError('no dsn', 'no previous exception')
2845 except cEncodingError:
2846 t, v, tb = sys.exc_info()
2847 print(t)
2848 print(v)
2849 print(tb)
2850
2852 print("testing run_ro_queries()")
2853
2854 dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
2855 conn = get_connection(dsn, readonly=True)
2856
2857 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': 'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
2858 print(data)
2859 print(idx)
2860 data, idx = run_ro_queries(link_obj=conn, queries=[{'cmd': 'SELECT 1'}], return_data=True, get_col_idx=True)
2861 print(data)
2862 print(idx)
2863
2864 curs = conn.cursor()
2865
2866 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': 'SELECT version()'}], return_data=True, get_col_idx=True, verbose=True)
2867 print(data)
2868 print(idx)
2869
2870 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': 'SELECT 1'}], return_data=True, get_col_idx=True, verbose=True)
2871 print(data)
2872 print(idx)
2873
2874 try:
2875 data, idx = run_ro_queries(link_obj=curs, queries=[{'cmd': 'selec 1'}], return_data=True, get_col_idx=True, verbose=True)
2876 print(data)
2877 print(idx)
2878 except psycopg2.ProgrammingError:
2879 print('SUCCESS: run_ro_queries("selec 1") failed as expected')
2880 t, v = sys.exc_info()[:2]
2881 print(' ', t)
2882 print(' ', v)
2883
2884 curs.close()
2885
2890
2892 print("testing set_default_client_encoding()")
2893
2894 enc = 'foo'
2895 try:
2896 set_default_client_encoding(enc)
2897 print("SUCCESS: encoding [%s] worked" % enc)
2898 except ValueError:
2899 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2900 t, v = sys.exc_info()[:2]
2901 print(' ', t)
2902 print(' ', v)
2903
2904 enc = ''
2905 try:
2906 set_default_client_encoding(enc)
2907 print("SUCCESS: encoding [%s] worked" % enc)
2908 except ValueError:
2909 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2910 t, v = sys.exc_info()[:2]
2911 print(' ', t)
2912 print(' ', v)
2913
2914 enc = 'latin1'
2915 try:
2916 set_default_client_encoding(enc)
2917 print("SUCCESS: encoding [%s] worked" % enc)
2918 except ValueError:
2919 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2920 t, v = sys.exc_info()[:2]
2921 print(' ', t)
2922 print(' ', v)
2923
2924 enc = 'utf8'
2925 try:
2926 set_default_client_encoding(enc)
2927 print("SUCCESS: encoding [%s] worked" % enc)
2928 except ValueError:
2929 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2930 t, v = sys.exc_info()[:2]
2931 print(' ', t)
2932 print(' ', v)
2933
2934 enc = 'unicode'
2935 try:
2936 set_default_client_encoding(enc)
2937 print("SUCCESS: encoding [%s] worked" % enc)
2938 except ValueError:
2939 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2940 t, v = sys.exc_info()[:2]
2941 print(' ', t)
2942 print(' ', v)
2943
2944 enc = 'UNICODE'
2945 try:
2946 set_default_client_encoding(enc)
2947 print("SUCCESS: encoding [%s] worked" % enc)
2948 except ValueError:
2949 print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
2950 t, v = sys.exc_info()[:2]
2951 print(' ', t)
2952 print(' ', v)
2953
	def test_cursor_query():
		# (name assumed - the original def line is lost)
		dsn = get_default_dsn()
		conn = get_connection(dsn, readonly = True)
		curs = conn.cursor()
		curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])

	def test_sanitize_pg_regex():
		tests = [
			['(', '\\('],
			['[', '\\['],
			[')', '\\)']
		]
		for test in tests:
			result = sanitize_pg_regex(test[0])
			if result != test[1]:
				print('ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1]))

	def test_is_pg_interval():
		status = True
		tests = [
			[None, True],
			[1, True],
			['1', True],
			['abc', False]
		]

		if not is_pg_interval():
			print('ERROR: is_pg_interval() returned "False", expected "True"')
			status = False

		for test in tests:
			result = is_pg_interval(test[0])
			if result != test[1]:
				print('ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]))
				status = False

		return status

	def test_get_foreign_key_names():
		print(get_foreign_key_names (
			src_schema = 'clin',
			src_table = 'vaccination',
			src_column = 'fk_episode',
			target_schema = 'clin',
			target_table = 'episode',
			target_column = 'pk'
		))

	def test_get_foreign_keys2column():
		schema = 'clin'
		table = 'episode'
		col = 'pk'
		print('column %s.%s.%s is referenced by:' % (schema, table, col))
		for row in get_foreign_keys2column (
			schema = schema,
			table = table,
			column = col
		):
			print(' <- %s.%s' % (
				row['referencing_table'],
				row['referencing_column']
			))

	def test_set_user_language():
		# expected results: True = language set, False = language not
		# translatable, None = invalid user -> IntegrityError expected
		tests = [
			[None, 'de_DE', True],
			[None, 'lang_w/o_tx', False],
			[None, None, True],
			['any-doc', 'de_DE', True],
			['any-doc', 'lang_w/o_tx', False],
			['any-doc', None, True],
			['invalid user', 'de_DE', None],
			['invalid user', 'lang_w/o_tx', False],
			['invalid user', None, True]
		]
		for test in tests:
			try:
				result = set_user_language(user = test[0], language = test[1])
				if result != test[2]:
					print("test:", test)
					print("result:", result, "expected:", test[2])
			except psycopg2.IntegrityError as e:
				if test[2] is None:
					continue
				print("test:", test)
				print("expected exception")
				print("result:", e)

	def test_get_schema_revision_history():
		for line in get_schema_revision_history():
			print(' - '.join(line))

	def test_run_query():
		# (name assumed - the original def line is lost)
		gmDateTime.init()
		args = {'dt': gmDateTime.pydt_max_here()}
		cmd = "SELECT %(dt)s"
		# exercise a regexp_replace()/to_timestamp() round trip:
		cmd = """
SELECT to_timestamp (foofoo,'YYMMDD.HH24MI') FROM (
	SELECT REGEXP_REPLACE (
		't1.130729.0902.tif',		-- string
		E'(.1)\.([0-9\.]+)(\.tif)',	-- pattern
		E'\\\\2'					-- replacement
	) AS foofoo
) AS foo"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
		print(rows)
		print(rows[0])
		print(rows[0][0])

	def test_row_locking():
		# (name assumed - the original def line is lost)
		row_is_locked(table = 'dem.identity', pk = 12)

		print("1st connection:")
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" 1st shared lock succeeded:", lock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print(" 2nd shared lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" exclusive lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = True))
		print(" `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = True))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("2nd connection:")
		conn = get_raw_connection(readonly = True)
		print(" shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" exclusive lock succeeded ?", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True), "(should fail)")
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("1st connection:")
		print(" unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("2nd connection:")
		print(" exclusive lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		conn.close()

	def test_get_foreign_key_names2():
		# (name assumed - the original def line is lost)
		print(get_foreign_key_names (
			src_schema = 'dem',
			src_table = 'names',
			src_column = 'id_identity',
			target_schema = 'dem',
			target_table = 'identity',
			target_column = 'pk'
		))

	def test_get_index_name():
		print(get_index_name(indexed_table = 'clin.vaccination', indexed_column = 'fk_episode'))

	test_get_connection()