1 """GNUmed PostgreSQL connection handling.
2
3 TODO: iterator/generator batch fetching:
4 - http://groups-beta.google.com/group/comp.lang.python/msg/7ff516d7d9387dad
5 - search Google for "Geneator/Iterator Nesting Problem - Any Ideas? 2.4"
6
7 winner:
8 def resultset_functional_batchgenerator(cursor, size=100):
9 for results in iter(lambda: cursor.fetchmany(size), []):
10 for rec in results:
11 yield rec
12 """

__author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = 'GPL v2 or later (details at http://www.gnu.org)'

import time
import sys
import os
import stat
import io
import codecs
import logging
import datetime as pydt
import re as regex
import threading
import hashlib
import shutil

if __name__ == '__main__':
	sys.path.insert(0, '../../')
from Gnumed.pycommon import gmLoginInfo
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
from Gnumed.pycommon import gmTools
from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character, format_dict_like

_log = logging.getLogger('gm.db')

try:
	import psycopg2 as dbapi
except ImportError:
	_log.exception("Python database adapter psycopg2 not found.")
	print("CRITICAL ERROR: Cannot find module psycopg2 for connecting to the database server.")
	raise

_log.info('psycopg2 version: %s' % dbapi.__version__)
_log.info('PostgreSQL via DB-API module "%s": API level %s, thread safety %s, parameter style "%s"' % (dbapi, dbapi.apilevel, dbapi.threadsafety, dbapi.paramstyle))
if not (float(dbapi.apilevel) >= 2.0):
	raise ImportError('gmPG2: supported DB-API level too low')
if not (dbapi.threadsafety > 0):
	raise ImportError('gmPG2: lacking minimum thread safety in psycopg2')
if not (dbapi.paramstyle == 'pyformat'):
	raise ImportError('gmPG2: lacking pyformat (%%(<name>)s style) placeholder support in psycopg2')
try:
	dbapi.__version__.index('dt')
except ValueError:
	raise ImportError('gmPG2: lacking datetime support in psycopg2')
try:
	dbapi.__version__.index('ext')
except ValueError:
	raise ImportError('gmPG2: lacking extensions support in psycopg2')
try:
	dbapi.__version__.index('pq3')
except ValueError:
	raise ImportError('gmPG2: lacking v3 backend protocol support in psycopg2')

import psycopg2.extras
import psycopg2.extensions
import psycopg2.pool
import psycopg2.errorcodes as sql_error_codes

_default_client_encoding = 'UTF8'
_log.info('assuming default client encoding of [%s]' % _default_client_encoding)

_default_client_timezone = None
_sql_set_timezone = None
_timestamp_template = "cast('%s' as timestamp with time zone)"
FixedOffsetTimezone = dbapi.tz.FixedOffsetTimezone

_default_dsn = None
_default_login = None

default_database = 'gnumed_v22'

postgresql_version_string = None
postgresql_version = None

__ro_conn_pool = None

auto_request_login_params = True

known_schema_hashes = {
	0: 'not released, testing only',
	2: 'b09d50d7ed3f91ddf4c4ddb8ea507720',
	3: 'e73718eaf230d8f1d2d01afa8462e176',
	4: '4428ccf2e54c289136819e701bb095ea',
	5: '7e7b093af57aea48c288e76632a382e5',
	6: '90e2026ac2efd236da9c8608b8685b2d',
	7: '6c9f6d3981483f8e9433df99d1947b27',
	8: '89b13a7af83337c3aad153b717e52360',
	9: '641a9b2be3c378ffc2bb2f0b1c9f051d',
	10: '7ef42a8fb2bd929a2cdd0c63864b4e8a',
	11: '03042ae24f3f92877d986fb0a6184d76',
	12: '06183a6616db62257e22814007a8ed07',
	13: 'fab7c1ae408a6530c47f9b5111a0841e',
	14: 'e170d543f067d1ea60bfe9076b1560cf',
	15: '70012ff960b77ecdff4981c94b5b55b6',
	16: '0bcf44ca22c479b52976e5eda1de8161',
	17: '161428ee97a00e3bf56168c3a15b7b50',
	18: 'a0f9efcabdecfb4ddb6d8c0b69c02092',
	19: '57f009a159f55f77525cc0291e0c8b60',
	20: 'baed1901ed4c2f272b56c8cb2c6d88e8',
	21: 'e6a51a89dd22b75b61ead8f7083f251f',
	22: 'bf45f01327fb5feb2f5d3c06ba4a6792'
}

map_schema_hash2version = {
	'b09d50d7ed3f91ddf4c4ddb8ea507720': 2,
	'e73718eaf230d8f1d2d01afa8462e176': 3,
	'4428ccf2e54c289136819e701bb095ea': 4,
	'7e7b093af57aea48c288e76632a382e5': 5,
	'90e2026ac2efd236da9c8608b8685b2d': 6,
	'6c9f6d3981483f8e9433df99d1947b27': 7,
	'89b13a7af83337c3aad153b717e52360': 8,
	'641a9b2be3c378ffc2bb2f0b1c9f051d': 9,
	'7ef42a8fb2bd929a2cdd0c63864b4e8a': 10,
	'03042ae24f3f92877d986fb0a6184d76': 11,
	'06183a6616db62257e22814007a8ed07': 12,
	'fab7c1ae408a6530c47f9b5111a0841e': 13,
	'e170d543f067d1ea60bfe9076b1560cf': 14,
	'70012ff960b77ecdff4981c94b5b55b6': 15,
	'0bcf44ca22c479b52976e5eda1de8161': 16,
	'161428ee97a00e3bf56168c3a15b7b50': 17,
	'a0f9efcabdecfb4ddb6d8c0b69c02092': 18,
	'57f009a159f55f77525cc0291e0c8b60': 19,
	'baed1901ed4c2f272b56c8cb2c6d88e8': 20,
	'e6a51a89dd22b75b61ead8f7083f251f': 21,
	'bf45f01327fb5feb2f5d3c06ba4a6792': 22
}

map_client_branch2required_db_version = {
	'GIT tree': 0,
	'0.3': 9,
	'0.4': 10,
	'0.5': 11,
	'0.6': 12,
	'0.7': 13,
	'0.8': 14,
	'0.9': 15,
	'1.0': 16,
	'1.1': 16,
	'1.2': 17,
	'1.3': 18,
	'1.4': 19,
	'1.5': 20,
	'1.6': 21,
	'1.7': 22,
	'1.8': 22
}

map_psyco_tx_status2str = [
	'TRANSACTION_STATUS_IDLE',
	'TRANSACTION_STATUS_ACTIVE',
	'TRANSACTION_STATUS_INTRANS',
	'TRANSACTION_STATUS_INERROR',
	'TRANSACTION_STATUS_UNKNOWN'
]

map_psyco_conn_status2str = [
	'0 - ?',
	'STATUS_READY',
	'STATUS_BEGIN (= STATUS_IN_TRANSACTION)',
	'STATUS_PREPARED'
]

# psycopg2 numbers its isolation level constants such that 1 is READ
# COMMITTED and 4 is READ UNCOMMITTED (which PostgreSQL itself treats
# as READ COMMITTED anyway)
map_psyco_iso_level2str = {
	None: 'ISOLATION_LEVEL_DEFAULT (configured on server)',
	0: 'ISOLATION_LEVEL_AUTOCOMMIT',
	1: 'ISOLATION_LEVEL_READ_COMMITTED',
	2: 'ISOLATION_LEVEL_REPEATABLE_READ',
	3: 'ISOLATION_LEVEL_SERIALIZABLE',
	4: 'ISOLATION_LEVEL_READ_UNCOMMITTED'
}

205 query_table_col_defs = """select
206 cols.column_name,
207 cols.udt_name
208 from
209 information_schema.columns cols
210 where
211 cols.table_schema = %s
212 and
213 cols.table_name = %s
214 order by
215 cols.ordinal_position"""
216
217 query_table_attributes = """select
218 cols.column_name
219 from
220 information_schema.columns cols
221 where
222 cols.table_schema = %s
223 and
224 cols.table_name = %s
225 order by
226 cols.ordinal_position"""
227
228
229
230 SQL_foreign_key_name = """SELECT
231 fk_tbl.*,
232 (SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = fk_tbl.connamespace) AS constraint_schema,
233 fk_tbl.conname AS constraint_name,
234 (SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = (SELECT relnamespace FROM pg_class where oid = fk_tbl.conrelid)) AS source_schema,
235 (SELECT relname FROM pg_catalog.pg_class where oid = fk_tbl.conrelid) AS source_table,
236 (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = fk_tbl.conkey[1] AND attrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass) AS source_column,
237 (SELECT nspname FROM pg_catalog.pg_namespace WHERE oid = (SELECT relnamespace FROM pg_class where oid = fk_tbl.confrelid)) AS target_schema,
238 (SELECT relname FROM pg_catalog.pg_class where oid = fk_tbl.confrelid) AS target_table,
239 (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = fk_tbl.confkey[1] AND attrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass) AS target_column
240 FROM
241 pg_catalog.pg_constraint fk_tbl
242 WHERE
243 fk_tbl.contype = 'f'
244 AND
245 fk_tbl.conrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass
246 AND
247 fk_tbl.conkey[1] = (
248 SELECT
249 col_tbl1.attnum
250 FROM
251 pg_catalog.pg_attribute col_tbl1
252 WHERE
253 col_tbl1.attname = %(src_col)s
254 AND
255 col_tbl1.attrelid = (%(src_schema)s || '.' || %(src_tbl)s)::regclass
256 )
257 AND
258 fk_tbl.confrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass
259 AND
260 fk_tbl.confkey[1] = (
261 SELECT
262 col_tbl2.attnum
263 FROM
264 pg_catalog.pg_attribute col_tbl2
265 WHERE
266 col_tbl2.attname = %(target_col)s
267 AND
268 col_tbl2.attrelid = (%(target_schema)s || '.' || %(target_tbl)s)::regclass
269 )
270 """
271
272 SQL_get_index_name = """
273 SELECT
274 (SELECT nspname FROM pg_namespace WHERE pg_namespace.oid = pg_class.relnamespace)
275 AS index_schema,
276 pg_class.relname
277 AS index_name
278 FROM
279 pg_class
280 WHERE
281 pg_class.oid IN (
282 SELECT
283 indexrelid
284 FROM
285 pg_index
286 WHERE
287 pg_index.indrelid = %(idx_tbl)s::regclass
288 AND
289 pg_index.indnatts = 1 -- only one column in index
290 AND
291 pg_index.indkey[0] IN (
292 SELECT
293 pg_attribute.attnum
294 FROM
295 pg_attribute
296 WHERE
297 pg_attribute.attrelid = %(idx_tbl)s::regclass
298 AND
299 pg_attribute.attname = %(idx_col)s
300 )
301 )
302 """
303
304 SQL_get_pk_col_def = """
305 SELECT
306 pg_attribute.attname
307 AS pk_col,
308 format_type(pg_attribute.atttypid, pg_attribute.atttypmod)
309 AS pk_type
310 FROM pg_index, pg_class, pg_attribute, pg_namespace
311 WHERE
312 pg_class.oid = %(table)s::regclass
313 AND
314 indrelid = pg_class.oid
315 AND
316 -- nspname = %%(schema)s
317 -- AND
318 pg_class.relnamespace = pg_namespace.oid
319 AND
320 pg_attribute.attrelid = pg_class.oid
321 AND
322 pg_attribute.attnum = any(pg_index.indkey)
323 AND
324 indisprimary
325 """
326
327
328
329
def set_default_client_encoding(encoding = None):
	# can psycopg2 handle this encoding ?
	if encoding not in psycopg2.extensions.encodings:
		raise ValueError('psycopg2 does not know how to handle client (wire) encoding [%s]' % encoding)
	# can Python handle this encoding ?
	py_enc = psycopg2.extensions.encodings[encoding]
	try:
		codecs.lookup(py_enc)
	except LookupError:
		_log.warning('<codecs> module can NOT handle encoding [psycopg2::<%s> -> Python::<%s>]' % (encoding, py_enc))
		raise

	global _default_client_encoding
	_log.info('setting default client encoding from [%s] to [%s]' % (_default_client_encoding, encoding))
	_default_client_encoding = encoding
	return True

def _validate_timezone(conn=None, timezone=None):

	_log.debug('validating time zone [%s]', timezone)

	cmd = 'set timezone to %(tz)s'
	args = {'tz': timezone}

	conn.commit()
	curs = conn.cursor()
	is_valid = False
	try:
		curs.execute(cmd, args)
		_log.info('time zone [%s] is settable', timezone)
		# can we actually use it, though ?
		cmd = """select '1920-01-19 23:00:00+01'::timestamp with time zone"""
		try:
			curs.execute(cmd)
			curs.fetchone()
			_log.info('time zone [%s] is usable', timezone)
			is_valid = True
		except Exception:
			_log.error('error using time zone [%s]', timezone)
	except dbapi.DataError:
		_log.warning('time zone [%s] is not settable', timezone)
	except Exception:
		_log.error('failed to set time zone to [%s]', timezone)
		_log.exception('')

	curs.close()
	conn.rollback()

	return is_valid

def _expand_timezone(conn=None, timezone=None):
	"""Some timezone definitions are abbreviations so try to expand
	them because "set time zone" doesn't take abbreviations."""

	cmd = """
select distinct on (abbrev) name
from pg_timezone_names
where
	abbrev = %(tz)s and
	name ~ '^[^/]+/[^/]+$' and
	name !~ '^Etc/'
"""
	args = {'tz': timezone}

	conn.commit()
	curs = conn.cursor()

	result = timezone
	try:
		curs.execute(cmd, args)
		rows = curs.fetchall()
		if len(rows) > 0:
			result = rows[0]['name']
			_log.debug('[%s] maps to [%s]', timezone, result)
	except Exception:
		_log.exception('cannot expand timezone abbreviation [%s]', timezone)

	curs.close()
	conn.rollback()

	return result

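# An illustrative sketch (not part of the original module) of what the
# expansion yields, assuming the backend knows the abbreviation:
#
#	_expand_timezone(conn = conn, timezone = 'CEST')
#	# -> some continent/city zone name carrying that abbreviation,
#	#    say, 'Europe/Amsterdam'; unknown abbreviations are returned
#	#    unchanged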

def __request_login_params_tui():
	"""Text mode request of database login parameters."""
	import getpass
	login = gmLoginInfo.LoginInfo()

	print("\nPlease enter the required login parameters:")
	try:
		login.host = prompted_input(prompt = "host ('' = non-TCP/IP)", default = '')
		login.database = prompted_input(prompt = "database", default = default_database)
		login.user = prompted_input(prompt = "user name", default = '')
		tmp = 'password for "%s" (not shown): ' % login.user
		login.password = getpass.getpass(tmp)
		gmLog2.add_word2hide(login.password)
		login.port = prompted_input(prompt = "port", default = 5432)
	except KeyboardInterrupt:
		_log.warning("user cancelled text mode login dialog")
		print("user cancelled text mode login dialog")
		raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))

	return login

def __request_login_params_gui_wx():
	"""GUI (wx) input request for database login parameters.

	Returns gmLoginInfo.LoginInfo object
	"""
	import wx
	# wxPython was successfully imported, but has the app been initialized ?
	if wx.GetApp() is None:
		raise AssertionError(_("The wxPython GUI framework hasn't been initialized yet!"))

	import gmAuthWidgets
	dlg = gmAuthWidgets.cLoginDialog(None, -1)
	dlg.ShowModal()
	login = dlg.panel.GetLoginInfo()
	dlg.Destroy()

	# user cancelled the dialog
	if login is None:
		raise gmExceptions.ConnectionError(_("Can't connect to database without login information!"))

	gmLog2.add_word2hide(login.password)

	return login

def request_login_params():
	"""Request login parameters for database connection."""
	if not auto_request_login_params:
		raise Exception('Cannot request login parameters.')

	# are we inside X ? (if not, wx cannot be used anyway)
	if 'DISPLAY' in os.environ:
		# try the GUI
		try:
			return __request_login_params_gui_wx()
		except Exception:
			pass

	# well, either we are on the console or the GUI failed, so try text mode
	return __request_login_params_tui()


def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=None):
	dsn_parts = []

	if (database is not None) and (database.strip() != ''):
		dsn_parts.append('dbname=%s' % database)

	if (host is not None) and (host.strip() != ''):
		dsn_parts.append('host=%s' % host)

	if (port is not None) and (str(port).strip() != ''):
		dsn_parts.append('port=%s' % port)

	if (user is not None) and (user.strip() != ''):
		dsn_parts.append('user=%s' % user)

	if (password is not None) and (password.strip() != ''):
		dsn_parts.append('password=%s' % password)

	dsn_parts.append('sslmode=prefer')
	dsn_parts.append('fallback_application_name=GNUmed')

	return ' '.join(dsn_parts)

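# Example (sketch, hypothetical credentials): empty/None parts are skipped,
# the SSL mode and application name defaults are always appended:
#
#	make_psycopg2_dsn(database = 'gnumed_v22', host = '', port = 5432, user = 'any-doc', password = 'secret')
#	# -> 'dbname=gnumed_v22 port=5432 user=any-doc password=secret sslmode=prefer fallback_application_name=GNUmed'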

def set_default_login(login=None):
	if login is None:
		return False

	if login.host is not None:
		if login.host.strip() == '':
			login.host = None

	global _default_login
	_log.info('setting default login from [%s] to [%s]' % (_default_login, login))
	_default_login = login

	dsn = make_psycopg2_dsn(login.database, login.host, login.port, login.user, login.password)

	global _default_dsn
	if _default_dsn is None:
		old_dsn = 'None'
	else:
		old_dsn = regex.sub(r'password=[^\s]+', 'password=%s' % u_replacement_character, _default_dsn)
	_log.info ('setting default DSN from [%s] to [%s]',
		old_dsn,
		regex.sub(r'password=[^\s]+', 'password=%s' % u_replacement_character, dsn)
	)
	_default_dsn = dsn

	return True

def __log_pgpass_info():	# (definition line missing from this copy; descriptive name reconstructed)
	try:
		pgpass_file = os.path.expanduser(os.path.join('~', '.pgpass'))
		if os.path.exists(pgpass_file):
			_log.debug('standard .pgpass (%s) exists', pgpass_file)
		else:
			_log.debug('standard .pgpass (%s) not found', pgpass_file)
		pgpass_var = os.getenv('PGPASSFILE')
		if pgpass_var is None:
			_log.debug('$PGPASSFILE not set')
		else:
			if os.path.exists(pgpass_var):
				_log.debug('$PGPASSFILE=%s exists', pgpass_var)
			else:
				_log.debug('$PGPASSFILE=%s not found', pgpass_var)
	except Exception:
		_log.exception('cannot detect .pgpass and/or $PGPASSFILE')


def database_schema_compatible(link_obj=None, version=None, verbose=True):
	expected_hash = known_schema_hashes[version]
	if version == 0:
		args = {'ver': 9999}
	else:
		args = {'ver': version}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{
			'cmd': 'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
			'args': args
		}]
	)
	if rows[0]['md5'] != expected_hash:
		_log.error('database schema version mismatch')
		_log.error('expected: %s (%s)' % (version, expected_hash))
		_log.error('detected: %s (%s)' % (get_schema_version(link_obj = link_obj), rows[0]['md5']))
		if verbose:
			_log.debug('schema dump follows:')
			for line in get_schema_structure(link_obj = link_obj).split('\n'):
				_log.debug(line)
			_log.debug('schema revision history dump follows:')
			for line in get_schema_revision_history(link_obj = link_obj):
				_log.debug(' - '.join(line))
		return False
	_log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
	return True


def get_schema_version(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select md5(gm.concat_table_structure()) as md5'}])
	try:
		return map_schema_hash2version[rows[0]['md5']]
	except KeyError:
		return 'unknown database schema version, MD5 hash is [%s]' % rows[0]['md5']

def get_schema_structure(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select gm.concat_table_structure()'}])
	return rows[0][0]

def get_schema_hash(link_obj=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': 'select md5(gm.concat_table_structure()) as md5'}])
	return rows[0]['md5']


def get_schema_revision_history(link_obj=None):

	if table_exists(link_obj = link_obj, schema = 'gm', table = 'schema_revision'):
		cmd = """
			SELECT
				imported::text,
				version,
				filename
			FROM gm.schema_revision
			ORDER BY imported"""
	elif table_exists(link_obj = link_obj, schema = 'public', table = 'gm_schema_revision'):
		cmd = """
			SELECT
				imported::text,
				version,
				filename
			FROM public.gm_schema_revision
			ORDER BY imported"""
	else:
		return []

	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}])
	return rows

def get_current_user():
	rows, idx = run_ro_queries(queries = [{'cmd': 'select CURRENT_USER'}])
	return rows[0][0]


def get_foreign_keys2column(schema='public', table=None, column=None, link_obj=None):
	"""Get the foreign keys pointing to schema.table.column.

	Does not properly work with multi-column FKs.
	GNUmed doesn't use any, however.
	"""
	args = {
		'schema': schema,
		'tbl': table,
		'col': column
	}
	cmd = """
SELECT
	%(schema)s AS referenced_schema,
	%(tbl)s AS referenced_table,
	%(col)s AS referenced_column,
	pgc.confkey AS referenced_column_list,
	pgc.conrelid::regclass AS referencing_table,
	pgc.conkey AS referencing_column_list,
	(select attname from pg_attribute where attnum = pgc.conkey[1] and attrelid = pgc.conrelid) AS referencing_column
FROM
	pg_constraint pgc
WHERE
	pgc.contype = 'f'
		AND
	pgc.confrelid = (
		select oid from pg_class where relname = %(tbl)s and relnamespace = (
			select oid from pg_namespace where nspname = %(schema)s
		)
	) and
	(
		select attnum
		from pg_attribute
		where
			attrelid = (select oid from pg_class where relname = %(tbl)s and relnamespace = (
				select oid from pg_namespace where nspname = %(schema)s
			))
				and
			attname = %(col)s
	) = any(pgc.confkey)
"""
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [
			{'cmd': cmd, 'args': args}
		]
	)

	return rows

def get_index_name(indexed_table=None, indexed_column=None, link_obj=None):

	args = {
		'idx_tbl': indexed_table,
		'idx_col': indexed_column
	}
	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': SQL_get_index_name, 'args': args}],
		get_col_idx = False
	)

	return rows

def get_foreign_key_names(src_schema=None, src_table=None, src_column=None, target_schema=None, target_table=None, target_column=None, link_obj=None):

	args = {
		'src_schema': src_schema,
		'src_tbl': src_table,
		'src_col': src_column,
		'target_schema': target_schema,
		'target_tbl': target_table,
		'target_col': target_column
	}

	rows, idx = run_ro_queries (
		link_obj = link_obj,
		queries = [{'cmd': SQL_foreign_key_name, 'args': args}],
		get_col_idx = False
	)

	return rows


def get_child_tables(schema='public', table=None, link_obj=None):
	"""Return child tables of <table>."""
	cmd = """
select
	pgn.nspname as namespace,
	pgc.relname as table
from
	pg_namespace pgn,
	pg_class pgc
where
	pgc.relnamespace = pgn.oid
		and
	pgc.oid in (
		select inhrelid from pg_inherits where inhparent = (
			select oid from pg_class where
				relnamespace = (select oid from pg_namespace where nspname = %(schema)s) and
				relname = %(table)s
		)
	)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': {'schema': schema, 'table': table}}])
	return rows


def schema_exists(link_obj=None, schema='gm'):
	cmd = """SELECT EXISTS (SELECT 1 FROM pg_namespace WHERE nspname = %(schema)s)"""
	args = {'schema': schema}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]


def table_exists(link_obj=None, schema=None, table=None):
	"""Returns True or False."""
	cmd = """
select exists (
	select 1 from information_schema.tables
	where
		table_schema = %s and
		table_name = %s and
		table_type = 'BASE TABLE'
)"""
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': (schema, table)}])
	return rows[0][0]


def function_exists(link_obj=None, schema=None, function=None):

	cmd = """
		SELECT EXISTS (
			SELECT 1 FROM pg_proc
			WHERE proname = %(func)s AND pronamespace = (SELECT oid FROM pg_namespace WHERE nspname = %(schema)s)
		)
	"""
	args = {
		'func': function,
		'schema': schema
	}
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}])
	return rows[0][0]


def get_col_indices(cursor = None):
	if cursor.description is None:
		_log.error('no result description available: unused cursor or last query did not select rows')
		return None
	col_indices = {}
	col_index = 0
	for col_desc in cursor.description:
		col_name = col_desc[0]
		# a query can return several columns of the same name,
		# disambiguate by appending the column index
		if col_name in col_indices:
			col_name = '%s_%s' % (col_name, col_index)
		col_indices[col_name] = col_index
		col_index += 1

	return col_indices

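# Sketch of the disambiguation above (illustrative only): a result set with
# two columns both named "pk" yields
#
#	{'pk': 0, 'pk_1': 1}
#
# - the second occurrence is suffixed with its column index.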

def get_col_defs(link_obj=None, schema='public', table=None):
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_col_defs, 'args': (schema, table)}])
	col_names = []
	col_type = {}
	for row in rows:
		col_names.append(row[0])
		# a leading underscore in the UDT name marks an array type
		if row[1].startswith('_'):
			col_type[row[0]] = row[1][1:] + '[]'
		else:
			col_type[row[0]] = row[1]
	col_defs = []
	col_defs.append(col_names)
	col_defs.append(col_type)
	return col_defs

900 """Return column attributes of table"""
901 rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': query_table_attributes, 'args': (schema, table)}])
902 cols = []
903 for row in rows:
904 cols.append(row[0])
905 return cols
906

def export_translations_from_database(filename=None):
	tx_file = io.open(filename, mode = 'wt', encoding = 'utf8')
	tx_file.write('-- GNUmed database string translations exported %s\n' % gmDateTime.pydt_now_here().strftime('%Y-%m-%d %H:%M'))
	tx_file.write('-- - contains translations for each of [%s]\n' % ', '.join(get_translation_languages()))
	tx_file.write('-- - user database language is set to [%s]\n\n' % get_current_user_language())
	tx_file.write('-- Please email this file to <gnumed-devel@gnu.org>.\n')
	tx_file.write('-- ----------------------------------------------------------------------------------------------\n\n')
	tx_file.write('set default_transaction_read_only to off;\n\n')
	tx_file.write("set client_encoding to 'utf-8';\n\n")
	tx_file.write('\\unset ON_ERROR_STOP\n\n')

	cmd = 'SELECT lang, orig, trans FROM i18n.translations ORDER BY lang, orig'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	for row in rows:
		line = "select i18n.upd_tx(E'%s', E'%s', E'%s');\n" % (
			row['lang'].replace("'", "\\'"),
			row['orig'].replace("'", "\\'"),
			row['trans'].replace("'", "\\'")
		)
		tx_file.write(line)
	tx_file.write('\n')

	tx_file.write('\\set ON_ERROR_STOP 1\n')
	tx_file.close()

	return True


def delete_translation_from_database(link_obj=None, language=None, original=None):
	cmd = 'DELETE FROM i18n.translations WHERE lang = %(lang)s AND orig = %(orig)s'
	args = {'lang': language, 'orig': original}
	run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = False, end_tx = True)
	return True


def update_translation_in_database(language=None, original=None, translation=None, link_obj=None):
	if language is None:
		cmd = 'SELECT i18n.upd_tx(%(orig)s, %(trans)s)'
	else:
		cmd = 'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
	args = {'lang': language, 'orig': original, 'trans': translation}
	run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False, link_obj = link_obj)
	return args


def get_translation_languages():
	rows, idx = run_ro_queries (
		queries = [{'cmd': 'select distinct lang from i18n.translations'}]
	)
	return [ r[0] for r in rows ]


def get_database_translations(language=None, order_by=None):

	args = {'lang': language}
	_log.debug('language [%s]', language)

	if order_by is None:
		order_by = 'ORDER BY lang, orig'
	else:
		order_by = 'ORDER BY %s' % order_by

	if language is None:
		cmd = """
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				NULL as lang,
				ik.orig,
				NULL AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				it.lang,
				it.orig,
				it.trans
			FROM
				i18n.translations it

		)) as translatable_strings
		%s""" % order_by
	else:
		cmd = """
		SELECT DISTINCT ON (orig, lang)
			lang, orig, trans
		FROM ((

			-- strings stored as translation keys whether translated or not
			SELECT
				%%(lang)s as lang,
				ik.orig,
				i18n._(ik.orig, %%(lang)s) AS trans
			FROM
				i18n.keys ik

		) UNION ALL (

			-- already translated strings
			SELECT
				%%(lang)s as lang,
				it.orig,
				i18n._(it.orig, %%(lang)s) AS trans
			FROM
				i18n.translations it

		)) AS translatable_strings
		%s""" % order_by

	rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

	if rows is None:
		_log.error('no translatable strings found')
	else:
		_log.debug('%s translatable strings found', len(rows))

	return rows


def get_current_user_language():
	cmd = 'select i18n.get_curr_lang()'
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}])
	return rows[0][0]


def set_user_language(user=None, language=None):
	"""Set the user language in the database.

	user = None: current db user
	language = None: unset
	"""
	_log.info('setting database language for user [%s] to [%s]', user, language)

	args = {
		'usr': user,
		'lang': language
	}

	if language is None:
		if user is None:
			queries = [{'cmd': 'select i18n.unset_curr_lang()'}]
		else:
			queries = [{'cmd': 'select i18n.unset_curr_lang(%(usr)s)', 'args': args}]
		queries.append({'cmd': 'select True'})
	else:
		if user is None:
			queries = [{'cmd': 'select i18n.set_curr_lang(%(lang)s)', 'args': args}]
		else:
			queries = [{'cmd': 'select i18n.set_curr_lang(%(lang)s, %(usr)s)', 'args': args}]

	rows, idx = run_rw_queries(queries = queries, return_data = True)

	if not rows[0][0]:
		_log.error('cannot set database language to [%s] for user [%s]', language, user)

	return rows[0][0]

1074 """Set the user language in the database.
1075
1076 - regardless of whether there is any translation available.
1077 - only for the current user
1078 """
1079 _log.info('forcing database language for current db user to [%s]', language)
1080
1081 run_rw_queries(queries = [{
1082 'cmd': 'select i18n.force_curr_lang(%(lang)s)',
1083 'args': {'lang': language}
1084 }])
1085
1086
1087

def send_maintenance_notification():
	cmd = 'notify "db_maintenance_warning"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)


def send_maintenance_shutdown():
	cmd = 'notify "db_maintenance_disconnect"'
	run_rw_queries(queries = [{'cmd': cmd}], return_data = False)


def is_pg_interval(candidate=None):
	cmd = 'SELECT %(candidate)s::interval'
	try:
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
		return True
	except Exception:
		cmd = 'SELECT %(candidate)s::text::interval'
		try:
			rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': {'candidate': candidate}}])
			return True
		except Exception:
			return False

def lock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory(_shared).

	- locks stack upon each other and need one unlock per lock
	- same connection:
		- all locks succeed
	- different connections:
		- shared + shared succeed
		- shared + exclusive fail
	"""
	_log.debug('locking row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = """SELECT pg_try_advisory_lock('%s'::regclass::oid::int, %s)""" % (table, pk)
	else:
		cmd = """SELECT pg_try_advisory_lock_shared('%s'::regclass::oid::int, %s)""" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot lock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False

def unlock_row(link_obj=None, table=None, pk=None, exclusive=False):
	"""Uses pg_advisory_unlock(_shared).

	- each lock needs one unlock
	"""
	_log.debug('trying to unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	if exclusive:
		cmd = "SELECT pg_advisory_unlock('%s'::regclass::oid::int, %s)" % (table, pk)
	else:
		cmd = "SELECT pg_advisory_unlock_shared('%s'::regclass::oid::int, %s)" % (table, pk)
	rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		return True
	_log.warning('cannot unlock row: [%s] [%s] (exclusive: %s)', table, pk, exclusive)
	return False

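# Usage sketch (illustrative only, hypothetical table/pk): advisory locks
# must be paired - each successful lock_row() needs one matching unlock_row()
# with the same parameters:
#
#	if lock_row(table = 'clin.episode', pk = 12, exclusive = True):
#		try:
#			pass	# ... modify the row ...
#		finally:
#			unlock_row(table = 'clin.episode', pk = 12, exclusive = True)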

def row_is_locked(table=None, pk=None):
	"""Looks at pg_locks.

	- does not take into account locks other than 'advisory', however
	"""
	cmd = """SELECT EXISTS (
		SELECT 1 FROM pg_locks WHERE
			classid = '%s'::regclass::oid::int
				AND
			objid = %s
				AND
			locktype = 'advisory'
	)""" % (table, pk)
	rows, idx = run_ro_queries(queries = [{'cmd': cmd}], get_col_idx = False)
	if rows[0][0]:
		_log.debug('row is locked: [%s] [%s]', table, pk)
		return True
	_log.debug('row is NOT locked: [%s] [%s]', table, pk)
	return False


def __store_file_in_cache(filename, cache_key_data):
	md5 = hashlib.md5()
	md5.update(('%s' % cache_key_data).encode('utf8'))
	md5_sum = md5.hexdigest()
	cached_name = os.path.join(gmTools.gmPaths().bytea_cache_dir, md5_sum)
	_log.debug('caching [%s] as [%s]', filename, cached_name)
	gmTools.remove_file(cached_name, log_error = True, force = True)
	try:
		shutil.copyfile(filename, cached_name, follow_symlinks = True)
	except shutil.SameFileError:
		pass
	except OSError:
		_log.exception('cannot copy file into cache: [%s] -> [%s]', filename, cached_name)
		return None
	PERMS_owner_only = 0o0660
	try:
		os.chmod(cached_name, PERMS_owner_only)
	except PermissionError:
		_log.exception('cannot set cache file [%s] permissions to [%s]', cached_name, stat.filemode(PERMS_owner_only))
		return None
	return cached_name


def __get_filename_in_cache(cache_key_data=None, data_size=None):
	md5 = hashlib.md5()
	md5.update(('%s' % cache_key_data).encode('utf8'))
	md5_sum = md5.hexdigest()
	cached_name = os.path.join(gmTools.gmPaths().bytea_cache_dir, md5_sum)
	_log.debug('[%s]: %s', md5_sum, cached_name)
	try:
		stat_info = os.stat(cached_name)		# do not shadow the <stat> module
	except FileNotFoundError:
		return None
	_log.debug('cache hit: %s [%s]', cached_name, stat_info)
	if os.path.islink(cached_name) or (not os.path.isfile(cached_name)):
		_log.error('object in cache is not a regular file: %s', cached_name)
		_log.error('possibly an attack, removing')
		removed = gmTools.remove_file(cached_name, log_error = True)
		if removed:
			return None
		raise BaseException('cannot delete suspicious object in cache dir: %s' % cached_name)
	if stat_info.st_size == data_size:
		return cached_name
	_log.debug('size in cache [%s] <> expected size [%s], removing cached file', stat_info.st_size, data_size)
	removed = gmTools.remove_file(cached_name, log_error = True)
	if removed:
		return None
	raise BaseException('cannot remove suspicious object from cache dir: %s' % cached_name)


def bytea2file(data_query=None, filename=None, chunk_size=0, data_size=None, data_size_query=None, conn=None, link2cached=True):

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	cache_key_data = '%s' % data_query
	cached_filename = __get_filename_in_cache(cache_key_data = cache_key_data, data_size = data_size)
	if cached_filename is not None:
		# preferably link to the cached file
		if link2cached:
			if gmTools.mklink(cached_filename, filename, overwrite = False):
				return True
			_log.debug('cannot link to cache, trying copy-from-cache')
		# else (or on failure) copy from the cache
		try:
			shutil.copyfile(cached_filename, filename, follow_symlinks = True)
			return True
		except shutil.SameFileError:
			pass
		except OSError:
			_log.exception('cannot copy file from cache: [%s] -> [%s]', cached_filename, filename)
		_log.debug('downloading new copy of file, despite found in cache')

	outfile = io.open(filename, 'wb')
	result = bytea2file_object (
		data_query = data_query,
		file_obj = outfile,
		chunk_size = chunk_size,
		data_size = data_size,
		data_size_query = data_size_query,
		conn = conn
	)
	outfile.close()
	__store_file_in_cache(filename, cache_key_data)

	return result


def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=None, data_size_query=None, conn=None):
	"""Store data from a bytea field into a file.

	<data_query>
	- dict {'cmd': ..., 'args': ...}
	- 'cmd' must be a string containing "... substring(data from %(start)s for %(size)s) ..."
	- 'args' must be a dict
	- must return one row with one field of type bytea
	<file_obj>
	- must be a file-like Python object
	<data_size>
	- integer of the total size of the expected data or None
	<data_size_query>
	- dict {'cmd': ..., 'args': ...}
	- must return one row with one field with the octet_length() of the data field
	- used only when <data_size> is None
	"""
	if data_size == 0:
		return True

	# client-side encoding must not get applied to binary data,
	# so when no connection is given get a raw one of our own
	if conn is None:
		conn = get_raw_connection(readonly = True)

	if data_size is None:
		rows, idx = run_ro_queries(link_obj = conn, queries = [data_size_query])
		data_size = rows[0][0]
	if data_size in [None, 0]:
		conn.rollback()
		return True

	max_chunk_size = 1024 * 1024 * 20
	if chunk_size == 0:
		chunk_size = min(data_size, max_chunk_size)

	_log.debug('expecting %s bytes of BYTEA data in chunks of %s bytes', data_size, chunk_size)

	# chunk the transfer rather than retrieving everything at once
	needed_chunks, remainder = divmod(data_size, chunk_size)
	_log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)

	# retrieve the full-size chunks (substring() starts counting at 1)
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = chunk_size
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except Exception:
			_log.error('cannot retrieve chunk [%s/%s], size [%s], try decreasing chunk size' % (chunk_id+1, needed_chunks, chunk_size))
			conn.rollback()
			raise
		file_obj.write(rows[0][0])

	# retrieve the remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		data_query['args']['start'] = chunk_start
		data_query['args']['size'] = remainder
		try:
			rows, idx = run_ro_queries(link_obj = conn, queries = [data_query])
		except Exception:
			_log.error('cannot retrieve remaining [%s] bytes' % remainder)
			conn.rollback()
			raise
		file_obj.write(rows[0][0])

	conn.rollback()
	return True

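# A sketch (illustrative, hypothetical table/column names) of a <data_query>
# suitable for the chunked retrieval above:
#
#	data_query = {
#		'cmd': 'SELECT substring(data from %(start)s for %(size)s) FROM blobs.doc_obj WHERE pk = %(pk)s',
#		'args': {'pk': 1234}
#	}
#
# bytea2file_object() fills in args['start'] and args['size'] per chunk.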

def file2bytea(query=None, filename=None, args=None, conn=None, file_md5=None):
	"""Store data from a file into a bytea field.

	The query must:
	- be in unicode
	- contain a format spec identifying the row (eg a primary key)
	  matching <args> if it is an UPDATE
	- contain a format spec " <field> = %(data)s::bytea"

	The query CAN return the MD5 of the inserted data:
		RETURNING md5(<field>) AS md5
	in which case it will compare it to the md5
	of the file.
	"""
	infile = open(filename, "rb")
	data_as_byte_string = infile.read()
	infile.close()
	if args is None:
		args = {}
	args['data'] = memoryview(data_as_byte_string)
	del(data_as_byte_string)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = True
	else:
		close_conn = False

	rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = (file_md5 is not None))

	success_status = True
	if file_md5 is None:
		conn.commit()
	else:
		db_md5 = rows[0]['md5']
		if file_md5 != db_md5:
			conn.rollback()
			success_status = False
			_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
		else:
			conn.commit()
			_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)

	if close_conn:
		conn.close()

	return success_status

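# Sketch of a suitable <query> (illustrative, hypothetical table/column/pk):
#
#	query = """
#		UPDATE blobs.doc_obj
#		SET data = %(data)s::bytea
#		WHERE pk = %(pk)s
#		RETURNING md5(data) AS md5"""
#	file2bytea(query = query, filename = '/tmp/scan.pdf', args = {'pk': 1234}, file_md5 = md5_of_file)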

def file2lo(filename=None, conn=None, file_md5=None):

	file_size = os.path.getsize(filename)
	if file_size > (1024 * 1024) * 1024:
		_log.debug('file size of [%s] > 1 GB, supposedly not supported by psycopg2 large objects (but seems to work anyway ?)', file_size)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop
	_log.debug('[%s] -> large object', filename)

	# insert the data
	lo = conn.lobject(0, 'w', 0, filename)
	lo_oid = lo.oid
	lo.close()
	_log.debug('large object OID: %s', lo_oid)

	# verify
	if file_md5 is None:
		conn.commit()
		close_conn()
		return lo_oid
	cmd = 'SELECT md5(lo_get(%(loid)s::oid))'
	args = {'loid': lo_oid}
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': cmd, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database large object match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return lo_oid
	conn.rollback()
	close_conn()
	_log.error('MD5 sums of data file and database large object [%s] do not match: [file::%s] <> [DB::%s]', lo_oid, file_md5, db_md5)
	return -1


def file2bytea_lo(filename=None, conn=None, file_md5=None):	# (definition line missing from this copy; name reconstructed)

	file_size = os.path.getsize(filename)
	if file_size > (1024 * 1024) * 1024:
		_log.debug('file size of [%s] > 1 GB, supposedly not supported by psycopg2 large objects (but seems to work anyway ?)', file_size)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop
	_log.debug('[%s] -> large object', filename)

	# insert the data
	lo = conn.lobject(0, 'w', 0, filename)
	lo_oid = lo.oid
	lo.close()
	_log.debug('large object OID: %s', lo_oid)

	# verify
	if file_md5 is None:
		conn.commit()
		close_conn()
		return lo_oid
	cmd = 'SELECT md5(lo_get(%(loid)s::oid))'
	args = {'loid': lo_oid}
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': cmd, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database large object match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return lo_oid
	conn.rollback()
	close_conn()
	_log.error('MD5 sums of data file and database large object [%s] do not match: [file::%s] <> [DB::%s]', lo_oid, file_md5, db_md5)
	return -1


def file2bytea_copy_from(table=None, columns=None, filename=None, conn=None, md5_query=None, file_md5=None):

	chunk_size = 32 * (1024 * 1024)
	_log.debug('[%s] (%s bytes) --(%s bytes)-> %s(%s)', filename, os.path.getsize(filename), chunk_size, table, columns)
	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop
	curs = conn.cursor()
	# write the data
	infile = open(filename, "rb")
	curs.copy_from(infile, table, size = chunk_size, columns = columns)
	infile.close()
	curs.close()
	if None in [file_md5, md5_query]:
		conn.commit()
		close_conn()
		return True
	# verify
	rows, idx = run_ro_queries(link_obj = conn, queries = [md5_query])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return True
	close_conn()
	_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
	return False


def file2bytea_overlay(query=None, args=None, filename=None, conn=None, md5_query=None, file_md5=None):
	"""Store data from a file into a bytea field.

	The query must:
	- 'cmd' must be in unicode
	- 'cmd' must contain a format spec identifying the row (eg
	  a primary key) matching <args> if it is an UPDATE
	- 'cmd' must contain "... SET ... <some_bytea_field> = OVERLAY(some_bytea_field PLACING %(data)s::bytea FROM %(start)s FOR %(size)s) ..."
	- 'args' must be a dict matching 'cmd'

	The query CAN return the MD5 of the inserted data:
		RETURNING md5(<field>) AS md5
	in which case it will compare it to the md5
	of the file.

	UPDATE
		the_table
	SET
		bytea_field = OVERLAY (
			coalesce(bytea_field, ''::bytea)
			PLACING
				%(data)s::bytea
			FROM
				%(start)s
			FOR
				%(size)s
		)
	WHERE
		primary_key = pk_value

	SELECT md5(bytea_field) FROM the_table WHERE primary_key = pk_value
	"""
	chunk_size = 32 * (1024 * 1024)
	file_size = os.path.getsize(filename)
	if file_size <= chunk_size:
		chunk_size = file_size
	needed_chunks, remainder = divmod(file_size, chunk_size)
	_log.debug('file data: %s bytes, chunks: %s, chunk size: %s bytes, remainder: %s bytes', file_size, needed_chunks, chunk_size, remainder)

	if conn is None:
		conn = get_raw_connection(readonly = False)
		close_conn = conn.close
	else:
		close_conn = __noop

	infile = open(filename, "rb")
	# write the full-size chunks
	for chunk_id in range(needed_chunks):
		chunk_start = (chunk_id * chunk_size) + 1
		args['start'] = chunk_start
		args['size'] = chunk_size
		data_as_byte_string = infile.read(chunk_size)
		args['data'] = memoryview(data_as_byte_string)
		del(data_as_byte_string)
		try:
			rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = False)
		except Exception:
			_log.exception('cannot write chunk [%s/%s] of size [%s], try decreasing chunk size', chunk_id+1, needed_chunks, chunk_size)
			conn.rollback()
			close_conn()
			infile.close()
			raise
	# write the remainder
	if remainder > 0:
		chunk_start = (needed_chunks * chunk_size) + 1
		args['start'] = chunk_start
		args['size'] = remainder
		data_as_byte_string = infile.read(remainder)
		args['data'] = memoryview(data_as_byte_string)
		del(data_as_byte_string)
		try:
			rows, idx = run_rw_queries(link_obj = conn, queries = [{'cmd': query, 'args': args}], end_tx = False, return_data = False)
		except Exception:
			_log.error('cannot write remaining [%s] bytes' % remainder)
			conn.rollback()
			close_conn()
			infile.close()
			raise
	infile.close()
	if None in [file_md5, md5_query]:
		conn.commit()
		close_conn()
		return True
	# verify
	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': md5_query, 'args': args}])
	db_md5 = rows[0][0]
	if file_md5 == db_md5:
		conn.commit()
		close_conn()
		_log.debug('MD5 sums of data file and database BYTEA field match: [file::%s] = [DB::%s]', file_md5, db_md5)
		return True
	close_conn()
	_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
	return False


def run_sql_script(sql_script, conn=None):

	if conn is None:
		conn = get_connection(readonly = False)

	from Gnumed.pycommon import gmPsql
	psql = gmPsql.Psql(conn)

	if psql.run(sql_script) == 0:
		query = {
			'cmd': 'select gm.log_script_insertion(%(name)s, %(ver)s)',
			'args': {'name': sql_script, 'ver': 'current'}
		}
		run_rw_queries(link_obj = conn, queries = [query])
		conn.commit()
		return True

	_log.error('error running sql script: %s', sql_script)
	return False


def sanitize_pg_regex(expression=None, escape_all=False):
	"""Escape input for use in a PostgreSQL regular expression.

	If a fragment comes from user input and is to be used
	as a regular expression we need to make sure it doesn't
	contain invalid regex patterns such as unbalanced ('s.

	<escape_all>
		True: try to escape *all* metacharacters
		False: only escape those which render the regex invalid
	"""
	return expression.replace (
			'(', '\\('
		).replace (
			')', '\\)'
		).replace (
			'[', '\\['
		).replace (
			'+', '\\+'
		).replace (
			'.', '\\.'
		).replace (
			'*', '\\*'
		)
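
# Sketch of the escaping (illustrative):
#
#	sanitize_pg_regex('(a+b).*')  ->  '\\(a\\+b\\)\\.\\*'
#
# that is, each of ( ) [ + . * is prefixed with a backslash so the fragment
# matches literally inside a PG regex.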

def capture_conn_state(conn=None):

	tx_status = conn.get_transaction_status()
	if tx_status in [ psycopg2.extensions.TRANSACTION_STATUS_INERROR, psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN ]:
		isolation_level = '%s (tx aborted or unknown, cannot retrieve)' % conn.isolation_level
	else:
		isolation_level = '%s (%s)' % (conn.isolation_level, map_psyco_iso_level2str[conn.isolation_level])
	conn_status = '%s (%s)' % (conn.status, map_psyco_conn_status2str[conn.status])
	if conn.closed != 0:
		conn_status = 'undefined (%s)' % conn_status
	try:
		conn_deferrable = conn.deferrable
	except AttributeError:
		conn_deferrable = 'unavailable'

	d = {
		'identity': id(conn),
		'backend PID': conn.get_backend_pid(),
		'protocol version': conn.protocol_version,
		'encoding': conn.encoding,
		'closed': conn.closed,
		'readonly': conn.readonly,
		'autocommit': conn.autocommit,
		'isolation level (psyco)': isolation_level,
		'async': conn.async_,
		'deferrable': conn_deferrable,
		'transaction status': '%s (%s)' % (tx_status, map_psyco_tx_status2str[tx_status]),
		'connection status': conn_status,
		'executing async op': conn.isexecuting(),
		'type': type(conn)
	}
	return '%s\n' % conn + format_dict_like (
		d,
		relevant_keys = [
			'type',
			'identity',
			'backend PID',
			'protocol version',
			'encoding',
			'isolation level (psyco)',
			'readonly',
			'autocommit',
			'closed',
			'connection status',
			'transaction status',
			'deferrable',
			'async',
			'executing async op'
		],
		tabular = True,
		value_delimiters = None
	)


def capture_cursor_state(cursor=None):
	conn = cursor.connection

	tx_status = conn.get_transaction_status()
	if tx_status in [ psycopg2.extensions.TRANSACTION_STATUS_INERROR, psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN ]:
		isolation_level = 'tx aborted or unknown, cannot retrieve'
	else:
		isolation_level = conn.isolation_level
	try:
		conn_deferrable = conn.deferrable
	except AttributeError:
		conn_deferrable = 'unavailable'

	if cursor.query is None:
		query = '<no query>'
	else:
		query = cursor.query

	txt = """Link state:
Cursor
  identity: %s; name: %s
  closed: %s; scrollable: %s; with hold: %s; arraysize: %s; itersize: %s;
  last rowcount: %s; rownumber: %s; lastrowid (OID): %s;
  last description: %s
  statusmessage: %s
Connection
  identity: %s; backend pid: %s; protocol version: %s;
  closed: %s; autocommit: %s; isolation level: %s; encoding: %s; async: %s; deferrable: %s; readonly: %s;
  TX status: %s; CX status: %s; executing async op: %s;
Query
  %s
""" % (
		id(cursor),
		cursor.name,
		cursor.closed,
		cursor.scrollable,
		cursor.withhold,
		cursor.arraysize,
		cursor.itersize,
		cursor.rowcount,
		cursor.rownumber,
		cursor.lastrowid,
		cursor.description,
		cursor.statusmessage,
		id(conn),
		conn.get_backend_pid(),
		conn.protocol_version,
		conn.closed,
		conn.autocommit,
		isolation_level,
		conn.encoding,
		conn.async_,
		conn_deferrable,
		conn.readonly,
		map_psyco_tx_status2str[tx_status],
		map_psyco_conn_status2str[conn.status],
		conn.isexecuting(),
		query
	)
	return txt

def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True, get_col_idx=False):
	"""Run read-only queries.

	<queries> must be a list of dicts:
		[
			{'cmd': <string>, 'args': <dict> or <tuple>},
			{...},
			...
		]
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		curs = link_obj
		curs_close = lambda :1
		tx_rollback = lambda :1
		readonly_rollback_just_in_case = lambda :1
	elif isinstance(link_obj, dbapi._psycopg.connection):
		curs = link_obj.cursor()
		curs_close = curs.close
		tx_rollback = link_obj.rollback
		if link_obj.autocommit is True:
			readonly_rollback_just_in_case = link_obj.rollback
		else:
			# do not rollback readonly queries on passed-in readwrite
			# connections in case the query happens to be part of a
			# transaction of readwrite queries
			readonly_rollback_just_in_case = lambda :1
	elif link_obj is None:
		conn = get_connection(readonly = True, verbose = verbose)
		curs = conn.cursor()
		curs_close = curs.close
		tx_rollback = conn.rollback
		readonly_rollback_just_in_case = conn.rollback
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	if verbose:
		_log.debug('cursor: %s', curs)

	for query in queries:
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug(capture_cursor_state(curs))
		except dbapi.Error as pg_exc:
			_log.error('query failed in RO connection')
			_log.error(capture_cursor_state(curs))
			if hasattr(pg_exc, 'diag'):
				for prop in dir(pg_exc.diag):
					if prop.startswith('__'):
						continue
					val = getattr(pg_exc.diag, prop)
					if val is None:
						continue
					_log.error('PG diags %s: %s', prop, val)
			pg_exc = make_pg_exception_fields_unicode(pg_exc)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.u_pgerror)
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = 'Query: [%s]' % curs.query.strip().strip('\n').strip().strip('\n')
				if curs.statusmessage != '':
					details = 'Status: %s\n%s' % (
						curs.statusmessage.strip().strip('\n').strip().strip('\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = '[%s]' % pg_exc.pgcode
				else:
					msg = '[%s]: %s' % (pg_exc.pgcode, pg_exc.u_pgerror)
				raise gmExceptions.AccessDenied (
					msg,
					source = 'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)
			raise
		except Exception:
			_log.exception('query failed in RO connection')
			_log.error(capture_cursor_state(curs))
			try:
				curs_close()
			except dbapi.InterfaceError:
				_log.exception('cannot close cursor')
			tx_rollback()
			raise

	data = None
	col_idx = None
	if return_data:
		data = curs.fetchall()
		if verbose:
			_log.debug('last query returned [%s (%s)] rows', curs.rowcount, len(data))
			_log.debug('cursor description: %s', curs.description)
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	# rollback so pooled connections can see data
	# committed by other transactions meanwhile
	readonly_rollback_just_in_case()
	return (data, col_idx)

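# Usage sketch (illustrative, hypothetical query): run one read-only query on
# a fresh pooled connection and fetch rows plus a column-name -> index map:
#
#	rows, idx = run_ro_queries (
#		queries = [{'cmd': 'SELECT pk, description FROM clin.episode WHERE fk_patient = %(pat)s', 'args': {'pat': 12}}],
#		get_col_idx = True
#	)
#	for row in rows:
#		print(row[idx['description']])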

def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None, get_col_idx=False, verbose=False):
	"""Convenience function for running a transaction
	that is supposed to get committed.

	<link_obj>
		can be either:
		- a cursor
		- a connection

	<queries>
		is a list of dicts [{'cmd': <string>, 'args': <dict> or <tuple>}]
		to be executed as a single transaction, the last
		query may usefully return rows (such as a
		"SELECT currval('some_sequence')" statement)

	<end_tx>
		- controls whether the transaction is finalized (eg.
		  committed/rolled back) or not, this allows the
		  call to run_rw_queries() to be part of a framing
		  transaction
		- if link_obj is a connection then <end_tx> will
		  default to False unless it is explicitly set to
		  True which is taken to mean "yes, you do have full
		  control over the transaction" in which case the
		  transaction is properly finalized
		- if link_obj is a cursor we CANNOT finalize the
		  transaction because we would need the connection for that
		- if link_obj is None <end_tx> will, of course, always be True

	<return_data>
		- if true, the returned data will include the rows
		  the last query selected
		- if false, it returns None instead

	<get_col_idx>
		- if true, the returned data will include a dictionary
		  mapping field names to column positions
		- if false, the returned data returns None instead

	method result:
		- returns a tuple (data, idx)
		- <data>:
			* (None, None) if last query did not return rows
			* ("fetchall() result", <index>) if last query returned any rows
			* for <index> see <get_col_idx>
	"""
	if isinstance(link_obj, dbapi._psycopg.cursor):
		conn_close = lambda :1
		conn_commit = lambda :1
		tx_rollback = lambda :1
		curs = link_obj
		curs_close = lambda :1
		notices_accessor = curs.connection
	elif isinstance(link_obj, dbapi._psycopg.connection):
		conn_close = lambda :1
		if end_tx:
			conn_commit = link_obj.commit
			tx_rollback = link_obj.rollback
		else:
			conn_commit = lambda :1
			tx_rollback = lambda :1
		curs = link_obj.cursor()
		curs_close = curs.close
		notices_accessor = link_obj
	elif link_obj is None:
		conn = get_connection(readonly = False)
		conn_close = conn.close
		conn_commit = conn.commit
		tx_rollback = conn.rollback
		curs = conn.cursor()
		curs_close = curs.close
		notices_accessor = conn
	else:
		raise ValueError('link_obj must be cursor, connection or None but not [%s]' % link_obj)

	for query in queries:
		try:
			args = query['args']
		except KeyError:
			args = None
		try:
			curs.execute(query['cmd'], args)
			if verbose:
				_log.debug(capture_cursor_state(curs))
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/').replace('\r', '/'))
			del notices_accessor.notices[:]
		except dbapi.Error as pg_exc:
			_log.error('query failed in RW connection')
			_log.error(capture_cursor_state(curs))
			if hasattr(pg_exc, 'diag'):
				for prop in dir(pg_exc.diag):
					if prop.startswith('__'):
						continue
					val = getattr(pg_exc.diag, prop)
					if val is None:
						continue
					_log.error('PG diags %s: %s', prop, val)
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/').replace('\r', '/'))
			del notices_accessor.notices[:]
			pg_exc = make_pg_exception_fields_unicode(pg_exc)
			_log.error('PG error code: %s', pg_exc.pgcode)
			if pg_exc.pgerror is not None:
				_log.error('PG error message: %s', pg_exc.u_pgerror)
			if pg_exc.pgcode == sql_error_codes.INSUFFICIENT_PRIVILEGE:
				details = 'Query: [%s]' % curs.query.strip().strip('\n').strip().strip('\n')
				if curs.statusmessage != '':
					details = 'Status: %s\n%s' % (
						curs.statusmessage.strip().strip('\n').strip().strip('\n'),
						details
					)
				if pg_exc.pgerror is None:
					msg = '[%s]' % pg_exc.pgcode
				else:
					msg = '[%s]: %s' % (pg_exc.pgcode, pg_exc.u_pgerror)
				try:
					curs_close()
					tx_rollback()
					conn_close()
				except dbapi.InterfaceError:
					_log.exception('cannot cleanup')
				raise gmExceptions.AccessDenied (
					msg,
					source = 'PostgreSQL',
					code = pg_exc.pgcode,
					details = details
				)

			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			raise

		except Exception:
			_log.exception('error running query in RW connection')
			_log.error(capture_cursor_state(curs))
			for notice in notices_accessor.notices:
				_log.debug(notice.replace('\n', '/').replace('\r', '/'))
			del notices_accessor.notices[:]
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
			raise

	data = None
	col_idx = None
	if return_data:
		try:
			data = curs.fetchall()
		except Exception:
			_log.exception('error fetching data from RW query')
			gmLog2.log_stack_trace()
			try:
				curs_close()
				tx_rollback()
				conn_close()
			except dbapi.InterfaceError:
				_log.exception('cannot cleanup')
				raise
			raise
		if get_col_idx:
			col_idx = get_col_indices(curs)

	curs_close()
	conn_commit()
	conn_close()

	return (data, col_idx)

2083
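# Usage sketch for run_rw_queries(); the table and sequence names below
# are made up for illustration only:
#
#	queries = [
#		{'cmd': 'INSERT INTO foo.bar (val) VALUES (%(val)s)', 'args': {'val': 1}},
#		{'cmd': "SELECT currval(pg_get_serial_sequence('foo.bar', 'pk'))"}
#	]
#	rows, idx = run_rw_queries(queries = queries, return_data = True)
#	pk_of_new_row = rows[0][0]
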
def run_insert(link_obj=None, schema=None, table=None, values=None, returning=None, end_tx=False, get_col_idx=False, verbose=False):
	"""Generates SQL for an INSERT query and runs it via run_rw_queries().

	values: dict of values keyed by the field names to insert them into
	"""
	if schema is None:
		schema = 'public'

	fields = values.keys()
	val_snippets = []
	for field in fields:
		val_snippets.append('%%(%s)s' % field)

	if returning is None:
		returning = ''
		return_data = False
	else:
		returning = '\n\tRETURNING\n\t\t%s' % ', '.join(returning)
		return_data = True

	cmd = """\nINSERT INTO %s.%s (
		%s
	) VALUES (
		%s
	)%s""" % (
		schema,
		table,
		',\n\t\t'.join(fields),
		',\n\t\t'.join(val_snippets),
		returning
	)

	_log.debug('running SQL: >>>%s<<<', cmd)

	return run_rw_queries (
		link_obj = link_obj,
		queries = [{'cmd': cmd, 'args': values}],
		end_tx = end_tx,
		return_data = return_data,
		get_col_idx = get_col_idx,
		verbose = verbose
	)

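# Usage sketch for run_insert(); the table and column names here are
# illustrative only:
#
#	rows, idx = run_insert (
#		schema = 'clin',
#		table = 'foo',
#		values = {'narrative': 'test'},
#		returning = ['pk'],
#		end_tx = True
#	)
#	pk_of_new_row = rows[0][0]
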

class cConnectionPool(psycopg2.pool.ThreadedConnectionPool):
	# (class header lost in rendering; base class assumed from the docstring below)
	"""GNUmed database connection pool.

	Extends psycopg2's ThreadedConnectionPool with
	a custom _connect() function. Supports one connection
	per thread - which also ties it to one particular DSN."""

	def _connect(self, key=None):
		_log.debug('conn request with key [%s]', key)
		conn = get_raw_connection(dsn = self._kwargs['dsn'], verbose = self._kwargs['verbose'], readonly = True)

		conn.original_close = conn.close
		conn.close = _raise_exception_on_ro_conn_close
		if key is not None:
			self._used[key] = conn
			self._rused[id(conn)] = key
		else:
			self._pool.append(conn)
		return conn

	def discard_pooled_connection(self, key=None):
		# (method header lost in rendering; name assumed)
		if key is None:
			key = threading.current_thread().ident
		try:
			conn = self._used[key]
		except KeyError:
			_log.error('no such key in connection pool: %s', key)
			_log.debug('available keys: %s', self._used.keys())
			return
		del self._used[key]
		del self._rused[id(conn)]
		conn.original_close()

	def shutdown(self):
		# (method header lost in rendering; name assumed)
		for conn_key in self._used.keys():
			conn = self._used[conn_key]
			if conn.closed != 0:
				continue
			_log.debug('closing pooled database connection, pool key: %s, backend PID: %s', conn_key, conn.get_backend_pid())
			conn.original_close()


def get_raw_connection(dsn=None, verbose=False, readonly=True, connection_name=None, autocommit=False):
	"""Get a raw, unadorned connection.

	- this will not set any parameters such as encoding, timezone, datestyle
	- the only requirement is a valid DSN
	- hence it can be used for "service" connections,
	  say, for verifying encodings etc.
	"""
	if dsn is None:
		dsn = get_default_dsn()

	if 'host=salaam.homeunix' in dsn:
		raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')

	if ' client_encoding=' not in dsn:
		dsn += ' client_encoding=utf8'

	if ' application_name' not in dsn:
		if connection_name is None:
			dsn += " application_name=GNUmed-[%s]" % threading.current_thread().name.replace(' ', '_')
		else:
			dsn += " application_name=%s" % connection_name

	try:
		conn = dbapi.connect(dsn = dsn, connection_factory = psycopg2.extras.DictConnection)
	except dbapi.OperationalError as e:
		t, v, tb = sys.exc_info()
		try:
			msg = e.args[0]
		except (AttributeError, IndexError, TypeError):
			raise
		if 'fe_sendauth' in msg:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		if regex.search('user ".*" does not exist', msg) is not None:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		# English or German "user/role ... does not (nicht) exist"
		if (
			((regex.search('user ".*"', msg) is not None) or (regex.search('(R|r)ol{1,2}e', msg) is not None))
			and ('exist' in msg)
			and (regex.search('n(o|ich)t', msg) is not None)
		):
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		if 'uthenti' in msg:
			raise cAuthenticationError(dsn, msg).with_traceback(tb)
		raise

	if connection_name is None:
		_log.debug('established anonymous database connection, backend PID: %s', conn.get_backend_pid())
	else:
		_log.debug('established database connection "%s", backend PID: %s', connection_name, conn.get_backend_pid())

	global postgresql_version
	if postgresql_version is None:
		curs = conn.cursor()
		curs.execute("""
			SELECT
				substring(setting, E'^\\\\d{1,2}\\\\.\\\\d{1,2}')::numeric AS version
			FROM
				pg_settings
			WHERE
				name = 'server_version'
		""")
		postgresql_version = curs.fetchone()['version']
		_log.info('PostgreSQL version (numeric): %s' % postgresql_version)
		try:
			curs.execute("SELECT pg_size_pretty(pg_database_size(current_database()))")
			_log.info('database size: %s', curs.fetchone()[0])
		except BaseException:
			_log.exception('cannot get database size')
		finally:
			curs.close()
			conn.commit()
		if verbose:
			curs = conn.cursor()
			_log_PG_settings(curs = curs)
			curs.close()

	if _default_client_timezone is None:
		__detect_client_timezone(conn = conn)

	if readonly:
		_log.debug('readonly: forcing autocommit=True to avoid <IDLE IN TRANSACTION>')
		autocommit = True
	else:
		_log.debug('autocommit is desired to be: %s', autocommit)

	conn.commit()
	conn.autocommit = autocommit
	conn.readonly = readonly

	if verbose:
		_log.debug('enabling <plpgsql.extra_warnings/_errors>')
		curs = conn.cursor()
		try:
			curs.execute("SET plpgsql.extra_warnings TO 'all'")
			curs.execute("SET plpgsql.extra_errors TO 'all'")
		except BaseException:
			_log.exception('cannot enable <plpgsql.extra_warnings/_errors>')
		finally:
			curs.close()
			conn.commit()

	conn.is_decorated = False
	return conn

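# Usage sketch for get_raw_connection(); the DSN shown is made up, any
# valid libpq DSN works:
#
#	conn = get_raw_connection (
#		dsn = 'host=localhost dbname=gnumed_v22 user=any-doc password=any-doc',
#		readonly = False,
#		connection_name = 'my-service-conn'
#	)
#	curs = conn.cursor()
#	curs.execute('SELECT 1')
#	print(curs.fetchone()[0])
#	curs.close()
#	conn.close()
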
def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True, connection_name=None, autocommit=False):
	"""Get a new connection.

	This assumes the locale system has been initialized
	unless an encoding is specified.
	"""
	if pooled and readonly and (dsn is None):
		global __ro_conn_pool
		if __ro_conn_pool is None:
			log_ro_conn = True
			__ro_conn_pool = cConnectionPool (
				minconn = 1,
				maxconn = 2,
				dsn = dsn,
				verbose = verbose
			)
		else:
			log_ro_conn = False
		try:
			conn = __ro_conn_pool.getconn()
		except psycopg2.pool.PoolError:
			_log.exception('falling back to non-pooled connection')
			conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = readonly, connection_name = connection_name, autocommit = autocommit)
			log_ro_conn = True
		if log_ro_conn:
			[ _log.debug(line) for line in capture_conn_state(conn = conn).split('\n') ]
	else:
		conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = readonly, connection_name = connection_name, autocommit = autocommit)

	if conn.is_decorated:
		return conn

	if encoding is None:
		encoding = _default_client_encoding
	if encoding is None:
		encoding = gmI18N.get_encoding()
		_log.warning('client encoding not specified')
		_log.warning('the string encoding currently set in the active locale is used: [%s]' % encoding)
		_log.warning('for this to work properly the application MUST have called locale.setlocale() before')

	try:
		conn.set_client_encoding(encoding)
	except dbapi.DataError:
		t, v, tb = sys.exc_info()
		if 'cannot set encoding to' in str(v):
			raise cEncodingError(encoding, v).with_traceback(tb)
		if 'invalid value for parameter "client_encoding"' in str(v):
			raise cEncodingError(encoding, v).with_traceback(tb)
		raise

	if not readonly:
		conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)

	_log.debug('client time zone [%s]', _default_client_timezone)

	curs = conn.cursor()
	curs.execute(_sql_set_timezone, [_default_client_timezone])
	curs.close()
	conn.commit()

	conn.is_decorated = True

	if verbose:
		[ _log.debug(line) for line in capture_conn_state(conn = conn).split('\n') ]

	return conn

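# Usage sketch for get_connection(); credentials are made up:
#
#	# pooled, read-only connection for queries:
#	conn = get_connection()
#	rows, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': 'SELECT version()'}])
#
#	# dedicated read-write connection as part of a framing transaction:
#	conn = get_connection(readonly = False)
#	run_rw_queries(link_obj = conn, queries = [{'cmd': 'SELECT 1'}], end_tx = True)
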

def _raise_exception_on_ro_conn_close():
	raise TypeError('close() called on read-only connection')


def log_database_access(action=None):
	# (function header lost in rendering; name assumed)
	run_insert (
		schema = 'gm',
		table = 'access_log',
		values = {'user_action': action},
		end_tx = True
	)

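# Usage sketch (assuming the reconstructed name above); the action is
# free text which ends up in gm.access_log:
#
#	log_database_access(action = 'opened patient chart')
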

def sanity_check_time_skew(tolerance=60):
	# (function header lost in rendering; name and default assumed)
	"""Check that server time and local time are within
	the given tolerance of each other.

	tolerance: seconds
	"""
	_log.debug('maximum skew tolerance (seconds): %s', tolerance)

	cmd = "SELECT now() at time zone 'UTC'"
	conn = get_raw_connection(readonly = True)
	curs = conn.cursor()

	start = time.time()
	rows, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': cmd}])
	end = time.time()
	client_now_as_utc = pydt.datetime.utcnow()

	curs.close()
	conn.commit()

	server_now_as_utc = rows[0][0]
	query_duration = end - start
	_log.info('server "now" (UTC): %s', server_now_as_utc)
	_log.info('client "now" (UTC): %s', client_now_as_utc)
	_log.debug('wire roundtrip (seconds): %s', query_duration)

	if query_duration > tolerance:
		_log.error('useless to check client/server time skew, wire roundtrip > tolerance')
		return False

	if server_now_as_utc > client_now_as_utc:
		real_skew = server_now_as_utc - client_now_as_utc
	else:
		real_skew = client_now_as_utc - server_now_as_utc

	_log.debug('client/server time skew: %s', real_skew)

	if real_skew > pydt.timedelta(seconds = tolerance):
		_log.error('client/server time skew > tolerance')
		return False

	return True

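# Usage sketch (assuming the reconstructed name above):
#
#	if not sanity_check_time_skew(tolerance = 5):
#		print('client and server clocks differ by more than 5 seconds (or the link is too slow to tell)')
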

def sanity_check_database_settings():
	# (function header lost in rendering; name assumed)
	"""Checks database settings.

	returns (status, message)
	status:
		0: no problem
		1: non-fatal problem
		2: fatal problem
	"""
	_log.debug('checking database settings')

	conn = get_connection()

	global postgresql_version_string
	if postgresql_version_string is None:
		curs = conn.cursor()
		curs.execute('SELECT version()')
		postgresql_version_string = curs.fetchone()['version']
		curs.close()
		_log.info('PostgreSQL version (string): "%s"' % postgresql_version_string)

	# setting: [expected value(s), risk when misconfigured, fatal? (True/False/None = ignore)]
	options2check = {
		'allow_system_table_mods': [['off'], 'system breakage', False],
		'check_function_bodies': [['on'], 'suboptimal error detection', False],
		'datestyle': [['ISO'], 'faulty timestamp parsing', True],
		'default_transaction_isolation': [['read committed'], 'faulty database reads', True],
		'default_transaction_read_only': [['on'], 'accidental database writes', False],
		'fsync': [['on'], 'data loss/corruption', True],
		'full_page_writes': [['on'], 'data loss/corruption', False],
		'lc_messages': [['C'], 'suboptimal error detection', False],
		'password_encryption': [['on', 'md5', 'scram-sha-256'], 'breach of confidentiality', False],
		'synchronous_commit': [['on'], 'data loss/corruption', False],
		'sql_inheritance': [['on'], 'query breakage, data loss/corruption', True],
		'ignore_checksum_failure': [['off'], 'data loss/corruption', False],
		'track_commit_timestamp': [['on'], 'suboptimal auditing', False]
	}

	from Gnumed.pycommon import gmCfg2
	_cfg = gmCfg2.gmCfgData()
	if _cfg.get(option = 'hipaa'):
		options2check['log_connections'] = [['on'], 'non-compliance with HIPAA', True]
		options2check['log_disconnections'] = [['on'], 'non-compliance with HIPAA', True]
	else:
		options2check['log_connections'] = [['on'], 'non-compliance with HIPAA', None]
		options2check['log_disconnections'] = [['on'], 'non-compliance with HIPAA', None]

	cmd = "SELECT name, setting FROM pg_settings WHERE name IN %(settings)s"
	rows, idx = run_ro_queries (
		link_obj = conn,
		queries = [{'cmd': cmd, 'args': {'settings': tuple(options2check.keys())}}],
		get_col_idx = False
	)

	found_error = False
	found_problem = False
	msg = []
	for row in rows:
		option = row['name']
		value_found = row['setting']
		values_expected = options2check[option][0]
		risk = options2check[option][1]
		fatal_setting = options2check[option][2]
		if value_found not in values_expected:
			if fatal_setting is True:
				found_error = True
			elif fatal_setting is False:
				found_problem = True
			elif fatal_setting is None:
				pass
			else:
				_log.error(options2check[option])
				raise ValueError('invalid database configuration sanity check')
			msg.append(_(' option [%s]: %s') % (option, value_found))
			msg.append(_(' risk: %s') % risk)
			_log.warning('PG option [%s] set to [%s], expected %s, risk: <%s>' % (option, value_found, values_expected, risk))

	if found_error:
		return 2, '\n'.join(msg)

	if found_problem:
		return 1, '\n'.join(msg)

	return 0, ''

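# Usage sketch (assuming the reconstructed name above):
#
#	status, message = sanity_check_database_settings()
#	if status == 2:
#		print('fatal database misconfiguration:')
#		print(message)
#	elif status == 1:
#		print('non-fatal database configuration issues:')
#		print(message)
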

def _log_PG_settings(curs=None):
	try:
		curs.execute('SELECT name, setting, unit, source, reset_val, sourcefile, sourceline FROM pg_settings')
	except:
		_log.exception('cannot log PG settings ("SELECT ... FROM pg_settings" failed)')
		return False
	settings = curs.fetchall()
	for setting in settings:
		if setting['unit'] is None:
			unit = ''
		else:
			unit = ' %s' % setting['unit']
		if setting['sourcefile'] is None:
			sfile = ''
		else:
			sfile = '// %s @ %s' % (setting['sourcefile'], setting['sourceline'])
		_log.debug('%s: %s%s (set from: [%s] // sess RESET will set to: [%s]%s)',
			setting['name'],
			setting['setting'],
			unit,
			setting['source'],
			setting['reset_val'],
			sfile
		)

	try:
		curs.execute('SELECT pg_available_extensions()')
	except:
		_log.exception('cannot log available PG extensions')
		return False
	extensions = curs.fetchall()
	if extensions is None:
		_log.error('no PG extensions available')
		return False
	for ext in extensions:
		_log.debug('PG extension: %s', ext['pg_available_extensions'])

	return True


def make_pg_exception_fields_unicode(exc):
	if not isinstance(exc, dbapi.Error):
		return exc

	if exc.pgerror is None:
		try:
			msg = exc.args[0]
		except (AttributeError, IndexError, TypeError):
			return exc
		exc.u_pgerror = msg
		return exc

	exc.u_pgerror = exc.pgerror.strip()
	return exc


def extract_msg_from_pg_exception(exc=None):
	# (function header lost in rendering; name assumed)
	try:
		return '%s' % exc.args[0]
	except (AttributeError, IndexError, TypeError):
		return 'cannot extract message from exception'


class cAuthenticationError(dbapi.OperationalError):
	# (class header lost in rendering; base class assumed)

	def __init__(self, dsn=None, prev_val=None):
		self.dsn = dsn
		self.prev_val = prev_val

	def __str__(self):
		return 'PostgreSQL: %sDSN: %s' % (self.prev_val, self.dsn)


class cEncodingError(dbapi.OperationalError):
	# (class header lost in rendering; base class assumed)

	def __init__(self, encoding=None, prev_val=None):
		self.encoding = encoding
		self.prev_val = prev_val

	def __str__(self):
		return 'PostgreSQL: %s\nencoding: %s' % (self.prev_val, self.encoding)


class cAdapterPyDateTime(object):
	# (class name taken from the register_adapter() call below; headers reconstructed)

	def __init__(self, dt):
		if dt.tzinfo is None:
			raise ValueError('datetime.datetime instance is lacking a time zone: [%s]' % _timestamp_template % dt.isoformat())
		self.__dt = dt

	def getquoted(self):
		# (method body lost in rendering; minimal reconstruction based on
		# _timestamp_template and the ValueError message above)
		return (_timestamp_template % self.__dt.isoformat()).encode('utf8')


# make sure psycopg2 returns (arrays of) unicode strings
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2._psycopg.UNICODEARRAY)

# tell psycopg2 how to adapt datetime.datetime instances
psycopg2.extensions.register_adapter(pydt.datetime, cAdapterPyDateTime)

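# Adapter sketch (assuming the reconstructed getquoted() above):
#
#	dt = pydt.datetime(2014, 1, 31, 12, 0, tzinfo = FixedOffsetTimezone(offset = 60))
#	print(cAdapterPyDateTime(dt).getquoted())
#	# -> b"cast('2014-01-31T12:00:00+01:00' as timestamp with time zone)"
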

if __name__ == "__main__":

	if len(sys.argv) < 2:
		sys.exit()

	if sys.argv[1] != 'test':
		sys.exit()

	from Gnumed.pycommon.gmTools import file2md5

	logging.basicConfig(level = logging.DEBUG)

	# NOTE: the test function headers below were lost in rendering;
	# their names are assumed from the printed banners and call sites.

	def test_file2bytea():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (data bytea)'}
		])

		try:
			file2bytea(query = 'insert into test_bytea values (%(data)s::bytea) returning md5(data) as md5', filename = sys.argv[2], file_md5 = file2md5(sys.argv[2], True))
		except:
			_log.exception('error')

		run_rw_queries(queries = [
			{'cmd': 'drop table test_bytea'}
		])

	def test_file2bytea_copy_from():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (pk serial primary key, data bytea)'},
			{'cmd': "insert into test_bytea (data) values (NULL::bytea)"}
		])

		md5_query = {
			'cmd': 'select md5(data) AS md5 FROM test_bytea WHERE pk = %(pk)s',
			'args': {'pk': 1}
		}

		file2bytea_copy_from (
			table = 'test_bytea',
			columns = ['data'],
			filename = sys.argv[2],
			md5_query = md5_query,
			file_md5 = file2md5(sys.argv[2], True)
		)

		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'}
		])

	def test_file2bytea_overlay():
		run_rw_queries(queries = [
			{'cmd': 'drop table if exists test_bytea'},
			{'cmd': 'create table test_bytea (pk serial primary key, data bytea)'},
			{'cmd': "insert into test_bytea (data) values (NULL::bytea)"}
		])

		cmd = """
			update test_bytea
			set data = overlay (
				coalesce(data, ''::bytea)
				placing %(data)s::bytea
				from %(start)s
				for %(size)s
			)
			where
				pk > %(pk)s
		"""
		md5_cmd = 'select md5(data) from test_bytea'
		args = {'pk': 0}
		file2bytea_overlay (
			query = cmd,
			args = args,
			filename = sys.argv[2],
			conn = None,
			md5_query = md5_cmd,
			file_md5 = file2md5(sys.argv[2], True)
		)

		run_rw_queries(queries = [
			{'cmd': 'drop table test_bytea'}
		])

	def test_get_connection():
		print("testing get_connection()")

		dsn = 'foo'
		try:
			conn = get_connection(dsn = dsn)
		except dbapi.ProgrammingError:
			print("1) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("2) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9 user=abc'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("3) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v22 user=any-doc'
		try:
			conn = get_connection(dsn = dsn)
			print("4) SUCCESS:", dsn)
		except cAuthenticationError:
			print("4) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v9 user=any-doc password=abc'
		try:
			conn = get_connection(dsn = dsn)
		except cAuthenticationError:
			print("5) SUCCESS: get_connection(%s) failed as expected" % dsn)
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		conn = get_connection(dsn = dsn, readonly = True)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		conn = get_connection(dsn = dsn, readonly = False, verbose = True)

		dsn = 'dbname=gnumed_v22 user=any-doc password=any-doc'
		encoding = 'foo'
		try:
			conn = get_connection(dsn = dsn, encoding = encoding)
		except cEncodingError:
			print("6) SUCCESS: get_connection(%s, %s) failed as expected" % (dsn, encoding))
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

	def test_exceptions():
		print("testing exceptions")

		try:
			raise cAuthenticationError('no dsn', 'no previous exception')
		except cAuthenticationError:
			t, v, tb = sys.exc_info()
			print(t)
			print(v)
			print(tb)

		try:
			raise cEncodingError('no dsn', 'no previous exception')
		except cEncodingError:
			t, v, tb = sys.exc_info()
			print(t)
			print(v)
			print(tb)

	def test_run_ro_queries():
		print("testing run_ro_queries()")

		dsn = 'dbname=gnumed_v9 user=any-doc password=any-doc'
		conn = get_connection(dsn, readonly = True)

		data, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': 'SELECT version()'}], return_data = True, get_col_idx = True, verbose = True)
		print(data)
		print(idx)
		data, idx = run_ro_queries(link_obj = conn, queries = [{'cmd': 'SELECT 1'}], return_data = True, get_col_idx = True)
		print(data)
		print(idx)

		curs = conn.cursor()

		data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': 'SELECT version()'}], return_data = True, get_col_idx = True, verbose = True)
		print(data)
		print(idx)

		data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': 'SELECT 1'}], return_data = True, get_col_idx = True, verbose = True)
		print(data)
		print(idx)

		try:
			data, idx = run_ro_queries(link_obj = curs, queries = [{'cmd': 'selec 1'}], return_data = True, get_col_idx = True, verbose = True)
			print(data)
			print(idx)
		except psycopg2.ProgrammingError:
			print('SUCCESS: run_ro_queries("selec 1") failed as expected')
			t, v = sys.exc_info()[:2]
			print(' ', t)
			print(' ', v)

		curs.close()

	def test_set_default_client_encoding():
		print("testing set_default_client_encoding()")

		for enc in ['foo', '', 'latin1', 'utf8', 'unicode', 'UNICODE']:
			try:
				set_default_client_encoding(enc)
				print("SUCCESS: encoding [%s] worked" % enc)
			except ValueError:
				print("SUCCESS: set_default_client_encoding(%s) failed as expected" % enc)
				t, v = sys.exc_info()[:2]
				print(' ', t)
				print(' ', v)

	def test_narrative_query():
		# (function header lost in rendering; name assumed)
		dsn = get_default_dsn()
		conn = get_connection(dsn, readonly = True)
		curs = conn.cursor()
		curs.execute('SELECT * from clin.clin_narrative where narrative = %s', ['a'])

	def test_sanitize_pg_regex():
		tests = [
			['(', '\\(']
			, ['[', '\\[']
			, [')', '\\)']
		]
		for test in tests:
			result = sanitize_pg_regex(test[0])
			if result != test[1]:
				print('ERROR: sanitize_pg_regex(%s) returned "%s", expected "%s"' % (test[0], result, test[1]))

	def test_is_pg_interval():
		status = True
		tests = [
			[None, True],
			[1, True],
			['1', True],
			['abc', False]
		]

		if not is_pg_interval():
			print('ERROR: is_pg_interval() returned "False", expected "True"')
			status = False

		for test in tests:
			result = is_pg_interval(test[0])
			if result != test[1]:
				print('ERROR: is_pg_interval(%s) returned "%s", expected "%s"' % (test[0], result, test[1]))
				status = False

		return status

	def test_get_foreign_key_names():
		print(get_foreign_key_names (
			src_schema = 'clin',
			src_table = 'vaccination',
			src_column = 'fk_episode',
			target_schema = 'clin',
			target_table = 'episode',
			target_column = 'pk'
		))

	def test_get_foreign_keys2column():
		schema = 'clin'
		table = 'episode'
		col = 'pk'
		print('column %s.%s.%s is referenced by:' % (schema, table, col))
		for row in get_foreign_keys2column (
			schema = schema,
			table = table,
			column = col
		):
			print(' <- %s.%s' % (
				row['referencing_table'],
				row['referencing_column']
			))

	def test_set_user_language():
		# (user, language, expected result)
		tests = [
			[None, 'de_DE', True],
			[None, 'lang_w/o_tx', False],
			[None, None, True],

			['any-doc', 'de_DE', True],
			['any-doc', 'lang_w/o_tx', False],
			['any-doc', None, True],

			['invalid user', 'de_DE', None],
			['invalid user', 'lang_w/o_tx', False],
			['invalid user', None, True]
		]
		for test in tests:
			try:
				result = set_user_language(user = test[0], language = test[1])
				if result != test[2]:
					print("test:", test)
					print("result:", result, "expected:", test[2])
			except psycopg2.IntegrityError as e:
				if test[2] is None:
					continue
				print("test:", test)
				print("expected exception")
				print("result:", e)

	def test_get_schema_revision_history():
		for line in get_schema_revision_history():
			print(' - '.join(line))

	def test_run_query():
		gmDateTime.init()
		args = {'dt': gmDateTime.pydt_max_here()}
		cmd = "SELECT %(dt)s"

		cmd = """
			SELECT to_timestamp (foofoo,'YYMMDD.HH24MI') FROM (
				SELECT REGEXP_REPLACE (
					't1.130729.0902.tif', -- string
					E'(.1)\\.([0-9\\.]+)(\\.tif)', -- pattern
					E'\\\\2' -- replacement
				) AS foofoo
			) AS foo"""
		rows, idx = run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
		print(rows)
		print(rows[0])
		print(rows[0][0])

	def test_row_locks():
		row_is_locked(table = 'dem.identity', pk = 12)

		print("1st connection:")
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" 1st shared lock succeeded:", lock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print(" 2nd shared lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" exclusive lock should succeed:", lock_row(table = 'dem.identity', pk = 12, exclusive = True))
		print(" `-> unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = True))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("2nd connection:")
		conn = get_raw_connection(readonly = True)
		print(" shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" exclusive lock succeeded ?", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True), "(should fail)")
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("1st connection:")
		print(" unlock succeeded:", unlock_row(table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		print("2nd connection:")
		print(" exclusive lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = True))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" shared lock should succeed:", lock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" `-> unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))
		print(" unlock succeeded:", unlock_row(link_obj = conn, table = 'dem.identity', pk = 12, exclusive = False))
		print(" locked:", row_is_locked(table = 'dem.identity', pk = 12))

		conn.close()

	def test_get_foreign_key_names2():
		print(get_foreign_key_names (
			src_schema = 'dem',
			src_table = 'names',
			src_column = 'id_identity',
			target_schema = 'dem',
			target_table = 'identity',
			target_column = 'pk'
		))

	def test_get_index_name():
		print(get_index_name(indexed_table = 'clin.vaccination', indexed_column = 'fk_episode'))

	test_get_connection()