"""This module encapsulates a document stored in a GNUmed database."""

__author__ = "Karsten Hilbert <Karsten.Hilbert@gmx.net>"
__license__ = "GPL v2 or later"

import sys, os, shutil, os.path, types, time, logging


if __name__ == '__main__':
    sys.path.insert(0, '../../')
from Gnumed.pycommon import gmExceptions
from Gnumed.pycommon import gmBusinessDBObject
from Gnumed.pycommon import gmPG2
from Gnumed.pycommon import gmTools
from Gnumed.pycommon import gmMimeLib
from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmWorkerThread

from Gnumed.business import gmOrganization


_log = logging.getLogger('gm.docs')

MUGSHOT = 26
DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE = 'visual progress note'
DOCUMENT_TYPE_PRESCRIPTION = 'prescription'

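# Typical usage (illustrative sketch only, not part of the API surface;
# assumes a configured gmPG2 connection and an existing patient, encounter
# and episode -- the PKs below are placeholders):
#
#   folder = cDocumentFolder(aPKey = 12)
#   for doc in folder.get_documents():
#       for part in doc.parts:
#           exported_file = part.save_to_file()
#
#   new_doc = folder.add_document(document_type = 'discharge summary', encounter = pk_enc, episode = pk_epi)
#   new_doc.add_part(file = '/tmp/scan.pdf')
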
class cDocumentFolder:
    """Represents a folder with medical documents for a single patient."""

    def __init__(self, aPKey=None):
        """Fails if

        - patient referenced by aPKey does not exist
        """
        self.pk_patient = aPKey
        if not self._pkey_exists():
            raise gmExceptions.ConstructorError("No patient with PK [%s] in database." % aPKey)

        _log.debug('instantiated document folder for patient [%s]' % self.pk_patient)

    def _pkey_exists(self):
        """Does this primary key exist ?

        Returns True if the patient row exists, None otherwise.
        """
        rows, idx = gmPG2.run_ro_queries(queries = [
            {'cmd': "select exists(select pk from dem.identity where pk = %s)", 'args': [self.pk_patient]}
        ])
        if not rows[0][0]:
            _log.error("patient [%s] not in demographic database" % self.pk_patient)
            return None
        return True


    def get_latest_freediams_prescription(self):
        cmd = """
            SELECT pk_doc
            FROM blobs.v_doc_med
            WHERE
                pk_patient = %(pat)s
                    AND
                type = %(typ)s
                    AND
                ext_ref = %(ref)s
            ORDER BY
                clin_when DESC
            LIMIT 1
        """
        args = {
            'pat': self.pk_patient,
            'typ': DOCUMENT_TYPE_PRESCRIPTION,
            'ref': 'FreeDiams'
        }
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        if len(rows) == 0:
            _log.info('no FreeDiams prescription available for patient [%s]' % self.pk_patient)
            return None
        prescription = cDocument(aPK_obj = rows[0][0])
        return prescription

    def get_latest_mugshot(self):
        cmd = "SELECT pk_obj FROM blobs.v_latest_mugshot WHERE pk_patient = %s"
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
        if len(rows) == 0:
            _log.info('no mugshots available for patient [%s]' % self.pk_patient)
            return None
        return cDocumentPart(aPK_obj = rows[0][0])

    latest_mugshot = property(get_latest_mugshot, lambda x:x)


    def get_mugshot_list(self, latest_only=True):
        if latest_only:
            cmd = "select pk_doc, pk_obj from blobs.v_latest_mugshot where pk_patient=%s"
        else:
            cmd = """
                select
                    vdm.pk_doc as pk_doc,
                    dobj.pk as pk_obj
                from
                    blobs.v_doc_med vdm,
                    blobs.doc_obj dobj
                where
                    vdm.pk_type = (select pk from blobs.doc_type where name = 'patient photograph')
                    and vdm.pk_patient = %s
                    and dobj.fk_doc = vdm.pk_doc
            """
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
        return rows

    def get_doc_list(self, doc_type=None):
        """Return flat list of document IDs."""
        args = {
            'ID': self.pk_patient,
            'TYP': doc_type
        }

        cmd = """
            select vdm.pk_doc
            from blobs.v_doc_med vdm
            where
                vdm.pk_patient = %%(ID)s
                %s
            order by vdm.clin_when"""

        if doc_type is None:
            cmd = cmd % ''
        else:
            try:
                int(doc_type)
                cmd = cmd % 'and vdm.pk_type = %(TYP)s'
            except (TypeError, ValueError):
                cmd = cmd % 'and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)'

        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        doc_ids = []
        for row in rows:
            doc_ids.append(row[0])
        return doc_ids


    def get_unsigned_documents(self):
        args = {'pat': self.pk_patient}
        cmd = _SQL_get_document_fields % """
            pk_doc IN (
                SELECT DISTINCT ON (b_vo.pk_doc) b_vo.pk_doc
                FROM blobs.v_obj4doc_no_data b_vo
                WHERE
                    pk_patient = %(pat)s
                        AND
                    reviewed IS FALSE
            )
            ORDER BY clin_when DESC"""
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
        return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]


    def get_documents(self, doc_type=None, pk_episodes=None, encounter=None, order_by=None, exclude_unsigned=False, pk_types=None):
        """Return list of documents."""

        args = {
            'pat': self.pk_patient,
            'type': doc_type,
            'enc': encounter
        }
        where_parts = ['pk_patient = %(pat)s']

        if doc_type is not None:
            try:
                int(doc_type)
                where_parts.append('pk_type = %(type)s')
            except (TypeError, ValueError):
                where_parts.append('pk_type = (SELECT pk FROM blobs.doc_type WHERE name = %(type)s)')

        if pk_types is not None:
            where_parts.append('pk_type IN %(pk_types)s')
            args['pk_types'] = tuple(pk_types)

        if (pk_episodes is not None) and (len(pk_episodes) > 0):
            where_parts.append('pk_episode IN %(epis)s')
            args['epis'] = tuple(pk_episodes)

        if encounter is not None:
            where_parts.append('pk_encounter = %(enc)s')

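        # "unsigned" = no part of the document has been reviewed yet, so
        # keep only documents which have at least one reviewed (signed) part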
        if exclude_unsigned:
            where_parts.append('pk_doc IN (SELECT b_vo.pk_doc FROM blobs.v_obj4doc_no_data b_vo WHERE b_vo.pk_patient = %(pat)s AND b_vo.reviewed IS TRUE)')

        if order_by is None:
            order_by = 'ORDER BY clin_when'

        cmd = "%s\n%s" % (_SQL_get_document_fields % ' AND '.join(where_parts), order_by)
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)

        return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]

    documents = property(get_documents, lambda x:x)


    def add_document(self, document_type=None, encounter=None, episode=None, link_obj=None):
        return create_document(link_obj = link_obj, document_type = document_type, encounter = encounter, episode = episode)


    def _get_all_document_org_units(self):
        cmd = gmOrganization._SQL_get_org_unit % (
            'pk_org_unit IN (SELECT DISTINCT ON (pk_org_unit) pk_org_unit FROM blobs.v_doc_med WHERE pk_patient = %(pat)s)'
        )
        args = {'pat': self.pk_patient}
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
        return [ gmOrganization.cOrgUnit(row = {'data': r, 'idx': idx, 'pk_field': 'pk_org_unit'}) for r in rows ]

    all_document_org_units = property(_get_all_document_org_units, lambda x:x)


_SQL_get_document_part_fields = "select * from blobs.v_obj4doc_no_data where %s"

class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
    """Represents one part of a medical document."""

    _cmd_fetch_payload = _SQL_get_document_part_fields % "pk_obj = %s"
    _cmds_store_payload = [
        """UPDATE blobs.doc_obj SET
            seq_idx = %(seq_idx)s,
            comment = gm.nullify_empty_string(%(obj_comment)s),
            filename = gm.nullify_empty_string(%(filename)s),
            fk_intended_reviewer = %(pk_intended_reviewer)s
        WHERE
            pk = %(pk_obj)s
                AND
            xmin = %(xmin_doc_obj)s
        RETURNING
            xmin AS xmin_doc_obj"""
    ]
    _updatable_fields = [
        'seq_idx',
        'obj_comment',
        'pk_intended_reviewer',
        'filename'
    ]


    def save_to_file(self, aChunkSize=0, filename=None, target_mime=None, target_extension=None, ignore_conversion_problems=False, directory=None, adjust_extension=False, conn=None):

        if filename is None:
            filename = self.get_useful_filename(make_unique = True, directory = directory)

        filename = self.__download_to_file(filename = filename)
        if filename is None:
            return None

        if target_mime is None:
            if filename.endswith('.dat'):
                if adjust_extension:
                    return gmMimeLib.adjust_extension_by_mimetype(filename)
            return filename

        if target_extension is None:
            target_extension = gmMimeLib.guess_ext_by_mimetype(mimetype = target_mime)

        target_path, name = os.path.split(filename)
        name, tmp = os.path.splitext(name)
        target_fname = gmTools.get_unique_filename (
            prefix = '%s-conv-' % name,
            suffix = target_extension
        )
        _log.debug('attempting conversion: [%s] -> [<%s>:%s]', filename, target_mime, target_fname)
        converted_fname = gmMimeLib.convert_file (
            filename = filename,
            target_mime = target_mime,
            target_filename = target_fname
        )
        if converted_fname is not None:
            return converted_fname

        _log.warning('conversion failed')
        if not ignore_conversion_problems:
            return None

        if filename.endswith('.dat'):
            if adjust_extension:
                filename = gmMimeLib.adjust_extension_by_mimetype(filename)
        _log.warning('programmed to ignore conversion problems, hoping receiver can handle [%s]', filename)
        return filename

    def get_reviews(self):
        cmd = """
            SELECT
                reviewer,
                reviewed_when,
                is_technically_abnormal,
                clinically_relevant,
                is_review_by_responsible_reviewer,
                is_your_review,
                coalesce(comment, '')
            FROM blobs.v_reviewed_doc_objects
            WHERE pk_doc_obj = %s
            ORDER BY
                is_your_review desc,
                is_review_by_responsible_reviewer desc,
                reviewed_when desc
        """
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
        return rows

    def __get_containing_document(self):
        return cDocument(aPK_obj = self._payload[self._idx['pk_doc']])

    containing_document = property(__get_containing_document)

    def update_data_from_file(self, fname=None, link_obj=None):
        if not (os.access(fname, os.R_OK) and os.path.isfile(fname)):
            _log.error('[%s] is not a readable file' % fname)
            return False

        cmd = "UPDATE blobs.doc_obj SET data = %(data)s::BYTEA WHERE pk = %(pk)s RETURNING md5(data) AS md5"
        args = {'pk': self.pk_obj}
        md5 = gmTools.file2md5(filename = fname, return_hex = True)
        if not gmPG2.file2bytea(conn = link_obj, query = cmd, filename = fname, args = args, file_md5 = md5):
            return False

        self.refetch_payload(link_obj = link_obj)
        return True


    def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
        cmd = """
            select pk
            from blobs.reviewed_doc_objs
            where
                fk_reviewed_row = %s and
                fk_reviewer = (select pk from dem.staff where db_user = current_user)"""
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])

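        # no review by the current user on record yet -> INSERT one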
        if len(rows) == 0:
            cols = [
                "fk_reviewer",
                "fk_reviewed_row",
                "is_technically_abnormal",
                "clinically_relevant"
            ]
            vals = [
                '%(fk_row)s',
                '%(abnormal)s',
                '%(relevant)s'
            ]
            args = {
                'fk_row': self.pk_obj,
                'abnormal': technically_abnormal,
                'relevant': clinically_relevant
            }
            cmd = """
                insert into blobs.reviewed_doc_objs (
                    %s
                ) values (
                    (select pk from dem.staff where db_user=current_user),
                    %s
                )""" % (', '.join(cols), ', '.join(vals))

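        # a review by the current user already exists -> UPDATE it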
        if len(rows) == 1:
            pk_review = rows[0][0]
            args = {
                'abnormal': technically_abnormal,
                'relevant': clinically_relevant,
                'pk_review': pk_review
            }
            cmd = """
                UPDATE blobs.reviewed_doc_objs SET
                    is_technically_abnormal = %(abnormal)s,
                    clinically_relevant = %(relevant)s
                WHERE
                    pk = %(pk_review)s
            """
        rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

        return True


    def set_as_active_photograph(self):
        if self._payload[self._idx['type']] != 'patient photograph':
            return False

        cmd = 'SELECT coalesce(max(seq_idx)+1, 1) FROM blobs.doc_obj WHERE fk_doc = %(doc_id)s'
        rows, idx = gmPG2.run_ro_queries (
            queries = [{
                'cmd': cmd,
                'args': {'doc_id': self._payload[self._idx['pk_doc']]}
            }]
        )
        self._payload[self._idx['seq_idx']] = rows[0][0]
        self._is_modified = True
        self.save_payload()


    def reattach(self, pk_doc=None):
        if pk_doc == self._payload[self._idx['pk_doc']]:
            return True

        cmd = """
            UPDATE blobs.doc_obj SET
                fk_doc = %(pk_doc_target)s,
                -- coalesce needed for no-parts target docs
                seq_idx = (SELECT coalesce(max(seq_idx) + 1, 1) FROM blobs.doc_obj WHERE fk_doc = %(pk_doc_target)s)
            WHERE
                EXISTS(SELECT 1 FROM blobs.doc_med WHERE pk = %(pk_doc_target)s)
                    AND
                pk = %(pk_obj)s
                    AND
                xmin = %(xmin_doc_obj)s
            RETURNING fk_doc
        """
        args = {
            'pk_doc_target': pk_doc,
            'pk_obj': self.pk_obj,
            'xmin_doc_obj': self._payload[self._idx['xmin_doc_obj']]
        }
        rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = False)
        if len(rows) == 0:
            return False

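        # sanity check: the returned fk_doc should now point to the target
        # document; if it still equals the old one the reattach did not take effect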
        if rows[0]['fk_doc'] == self._payload[self._idx['pk_doc']]:
            return False

        self.refetch_payload()
        return True


    def display_via_mime(self, chunksize=0, block=None):
        fname = self.save_to_file(aChunkSize = chunksize)
        if fname is None:
            return False, ''

        success, msg = gmMimeLib.call_viewer_on_file(fname, block = block)
        if not success:
            return False, msg

        return True, ''



    def get_useful_filename(self, patient=None, make_unique=False, directory=None, include_gnumed_tag=True, date_before_type=False, name_first=True):
        patient_part = ''
        if patient is not None:
            if name_first:
                patient_part = '%s-' % patient.subdir_name
            else:
                patient_part = '-%s' % patient.subdir_name

        suffix = '.dat'
        if self._payload[self._idx['filename']] is not None:
            tmp, suffix = os.path.splitext (
                gmTools.fname_sanitize(self._payload[self._idx['filename']]).lower()
            )
            if suffix == '':
                suffix = '.dat'

        if include_gnumed_tag:
            fname_template = 'gm_doc-part_%s-%%s' % self._payload[self._idx['seq_idx']]
        else:
            fname_template = '%%s-part_%s' % self._payload[self._idx['seq_idx']]

        if date_before_type:
            date_type_part = '%s-%s' % (
                gmDateTime.pydt_strftime(self._payload[self._idx['date_generated']], '%Y-%m-%d', 'utf-8', gmDateTime.acc_days),
                self._payload[self._idx['l10n_type']].replace(' ', '_').replace('-', '_'),
            )
        else:
            date_type_part = '%s-%s' % (
                self._payload[self._idx['l10n_type']].replace(' ', '_').replace('-', '_'),
                gmDateTime.pydt_strftime(self._payload[self._idx['date_generated']], '%Y-%m-%d', 'utf-8', gmDateTime.acc_days)
            )

        if name_first:
            date_type_name_part = patient_part + date_type_part
        else:
            date_type_name_part = date_type_part + patient_part

        fname = fname_template % date_type_name_part

        if make_unique:
            fname = gmTools.get_unique_filename (
                prefix = '%s-' % gmTools.fname_sanitize(fname),
                suffix = suffix,
                tmp_dir = directory
            )
        else:
            fname = gmTools.fname_sanitize(os.path.join(gmTools.coalesce(directory, gmTools.gmPaths().tmp_dir), fname + suffix))

        return fname

    useful_filename = property(get_useful_filename)


    def __download_to_file(self, aChunkSize=0, filename=None, conn=None):
        if self._payload[self._idx['size']] == 0:
            _log.debug('part size 0, nothing to download')
            return None

        if filename is None:
            filename = gmTools.get_unique_filename()
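        # stream the BYTEA column to disk in chunks (via substring()) so that
        # large objects do not have to be held in memory all at once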
        success = gmPG2.bytea2file (
            data_query = {
                'cmd': 'SELECT substring(data from %(start)s for %(size)s) FROM blobs.doc_obj WHERE pk=%(pk)s',
                'args': {'pk': self.pk_obj}
            },
            filename = filename,
            chunk_size = aChunkSize,
            data_size = self._payload[self._idx['size']],
            conn = conn
        )
        if not success:
            return None

        return filename



def delete_document_part(part_pk=None, encounter_pk=None):
    cmd = """
        SELECT blobs.delete_document_part(%(pk)s, %(enc)s)
        WHERE NOT EXISTS
            (SELECT 1 FROM clin.export_item where fk_doc_obj = %(pk)s)
    """
    args = {'pk': part_pk, 'enc': encounter_pk}
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
    return


_SQL_get_document_fields = "SELECT * FROM blobs.v_doc_med b_vdm WHERE %s"

class cDocument(gmBusinessDBObject.cBusinessDBObject):
    """Represents one medical document."""

    _cmd_fetch_payload = _SQL_get_document_fields % "pk_doc = %s"
    _cmds_store_payload = [
        """UPDATE blobs.doc_med SET
            fk_type = %(pk_type)s,
            fk_episode = %(pk_episode)s,
            fk_encounter = %(pk_encounter)s,
            fk_org_unit = %(pk_org_unit)s,
            unit_is_receiver = %(unit_is_receiver)s,
            clin_when = %(clin_when)s,
            comment = gm.nullify_empty_string(%(comment)s),
            ext_ref = gm.nullify_empty_string(%(ext_ref)s),
            fk_hospital_stay = %(pk_hospital_stay)s
        WHERE
            pk = %(pk_doc)s and
            xmin = %(xmin_doc_med)s
        RETURNING
            xmin AS xmin_doc_med"""
    ]
    _updatable_fields = [
        'pk_type',
        'comment',
        'clin_when',
        'ext_ref',
        'pk_episode',
        'pk_encounter',
        'pk_org_unit',
        'unit_is_receiver',
        'pk_hospital_stay'
    ]

    def refetch_payload(self, ignore_changes=False, link_obj=None):
        try:
            del self.__has_unreviewed_parts
        except AttributeError:
            pass

        return super(cDocument, self).refetch_payload(ignore_changes = ignore_changes, link_obj = link_obj)


    def get_descriptions(self, max_lng=None):
        """Get document descriptions.

        - will return a list of rows
        """
        if max_lng is None:
            cmd = "SELECT pk, text FROM blobs.doc_desc WHERE fk_doc = %s"
        else:
            cmd = "SELECT pk, substring(text from 1 for %s) FROM blobs.doc_desc WHERE fk_doc=%%s" % max_lng
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
        return rows


    def update_description(self, pk=None, description=None):
        cmd = "update blobs.doc_desc set text = %(desc)s where fk_doc = %(doc)s and pk = %(pk_desc)s"
        gmPG2.run_rw_queries(queries = [
            {'cmd': cmd, 'args': {'doc': self.pk_obj, 'pk_desc': pk, 'desc': description}}
        ])
        return True


    def delete_description(self, pk=None):
        cmd = "delete from blobs.doc_desc where fk_doc = %(doc)s and pk = %(desc)s"
        gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': {'doc': self.pk_obj, 'desc': pk}}])
        return True


    def _get_parts(self):
        # reconstructed getter (original body elided): load all parts of this
        # document, ordered by their sequence index
        cmd = _SQL_get_document_part_fields % 'pk_doc = %(pk)s ORDER BY seq_idx'
        args = {'pk': self.pk_obj}
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
        return [ cDocumentPart(row = {'pk_field': 'pk_obj', 'idx': idx, 'data': r}) for r in rows ]

    parts = property(_get_parts, lambda x:x)


    def add_part(self, file=None, link_obj=None):
        """Add a part to the document."""

        cmd = """
            INSERT INTO blobs.doc_obj (
                fk_doc, data, seq_idx
            ) VALUES (
                %(doc_id)s,
                ''::bytea,
                (SELECT coalesce(max(seq_idx)+1, 1) FROM blobs.doc_obj WHERE fk_doc = %(doc_id)s)
            ) RETURNING pk"""
        rows, idx = gmPG2.run_rw_queries (
            link_obj = link_obj,
            queries = [{'cmd': cmd, 'args': {'doc_id': self.pk_obj}}],
            return_data = True
        )

        pk_part = rows[0][0]
        new_part = cDocumentPart(aPK_obj = pk_part, link_obj = link_obj)
        if not new_part.update_data_from_file(link_obj = link_obj, fname = file):
            _log.error('cannot import binary data from [%s] into document part' % file)
            gmPG2.run_rw_queries (
                link_obj = link_obj,
                queries = [{'cmd': "DELETE FROM blobs.doc_obj WHERE pk = %s", 'args': [pk_part]}]
            )
            return None
        new_part['filename'] = file
        new_part.save_payload(conn = link_obj)

        return new_part


    def add_parts_from_files(self, files=None, reviewer=None):
        new_parts = []

        for filename in files:
            new_part = self.add_part(file = filename)
            if new_part is None:
                msg = 'cannot instantiate document part object from [%s]' % filename
                _log.error(msg)
                return (False, msg, filename)
            new_parts.append(new_part)

            if reviewer is not None:
                new_part['pk_intended_reviewer'] = reviewer
                success, data = new_part.save_payload()
                if not success:
                    msg = 'cannot set reviewer to [%s] on [%s]' % (reviewer, filename)
                    _log.error(msg)
                    _log.error(str(data))
                    return (False, msg, filename)

        return (True, '', new_parts)


    def export_parts_to_files(self, export_dir=None, chunksize=0, conn=None):
        fnames = []
        for part in self.parts:
            fname = part.save_to_file(aChunkSize = chunksize, directory = export_dir, conn = conn)
            if fname is None:
                _log.error('cannot export document part [%s]', part)
                continue
            fnames.append(fname)
        return fnames


    def _get_has_unreviewed_parts(self):
        try:
            return self.__has_unreviewed_parts
        except AttributeError:
            pass

        cmd = "SELECT EXISTS(SELECT 1 FROM blobs.v_obj4doc_no_data WHERE pk_doc = %(pk)s AND reviewed IS FALSE)"
        args = {'pk': self.pk_obj}
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        self.__has_unreviewed_parts = rows[0][0]

        return self.__has_unreviewed_parts

    has_unreviewed_parts = property(_get_has_unreviewed_parts, lambda x:x)


    def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
        for part in self.parts:
            if not part.set_reviewed(technically_abnormal, clinically_relevant):
                return False
        return True


    def set_primary_reviewer(self, reviewer=None):
        for part in self.parts:
            part['pk_intended_reviewer'] = reviewer
            success, data = part.save_payload()
            if not success:
                _log.error('cannot set reviewer to [%s]' % reviewer)
                _log.error(str(data))
                return False
        return True


    hospital_stay = property(_get_hospital_stay, lambda x:x)


    def _get_org_unit(self):
        if self._payload[self._idx['pk_org_unit']] is None:
            return None
        return gmOrganization.cOrgUnit(self._payload[self._idx['pk_org_unit']])

    org_unit = property(_get_org_unit, lambda x:x)


    procedures = property(_get_procedures, lambda x:x)


    bills = property(_get_bills, lambda x:x)


def create_document(document_type=None, encounter=None, episode=None, link_obj=None):
    """Returns a new document instance or raises an exception."""
    try:
        int(document_type)
        cmd = """INSERT INTO blobs.doc_med (fk_type, fk_encounter, fk_episode) VALUES (%(type)s, %(enc)s, %(epi)s) RETURNING pk"""
    except ValueError:
        create_document_type(document_type = document_type)
        cmd = """
            INSERT INTO blobs.doc_med (
                fk_type,
                fk_encounter,
                fk_episode
            ) VALUES (
                coalesce (
                    (SELECT pk from blobs.doc_type bdt where bdt.name = %(type)s),
                    (SELECT pk from blobs.doc_type bdt where _(bdt.name) = %(type)s)
                ),
                %(enc)s,
                %(epi)s
            ) RETURNING pk"""
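    # the coalesce() above matches the requested type either by its plain name
    # or by its translated (_()) name; create_document_type() above makes sure
    # the type exists before the INSERT runs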
    args = {'type': document_type, 'enc': encounter, 'epi': episode}
    rows, idx = gmPG2.run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = True)
    doc = cDocument(aPK_obj = rows[0][0], link_obj = link_obj)
    return doc


def search_for_documents(patient_id=None, type_id=None, external_reference=None, pk_episode=None, pk_types=None):
    """Searches for documents with the given patient and type ID."""

    if (patient_id is None) and (pk_episode is None):
        raise ValueError('need patient_id or pk_episode to search for document')

    where_parts = []
    args = {
        'pat_id': patient_id,
        'type_id': type_id,
        'ref': external_reference,
        'pk_epi': pk_episode
    }

    if patient_id is not None:
        where_parts.append('pk_patient = %(pat_id)s')

    if type_id is not None:
        where_parts.append('pk_type = %(type_id)s')

    if external_reference is not None:
        where_parts.append('ext_ref = %(ref)s')

    if pk_episode is not None:
        where_parts.append('pk_episode = %(pk_epi)s')

    if pk_types is not None:
        where_parts.append('pk_type IN %(pk_types)s')
        args['pk_types'] = tuple(pk_types)

    cmd = _SQL_get_document_fields % ' AND '.join(where_parts)
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
    return [ cDocument(row = {'data': r, 'idx': idx, 'pk_field': 'pk_doc'}) for r in rows ]


def delete_document(document_id=None, encounter_id=None):
    cmd = "SELECT blobs.delete_document(%(pk)s, %(enc)s)"
    args = {'pk': document_id, 'enc': encounter_id}
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True)
    if not rows[0][0]:
        _log.error('cannot delete document [%s]', document_id)
        return False
    return True


def reclassify_documents_by_type(original_type=None, target_type=None):
    _log.debug('reclassifying documents by type')
    _log.debug('original: %s', original_type)
    _log.debug('target: %s', target_type)

    if target_type['pk_doc_type'] == original_type['pk_doc_type']:
        return True

    cmd = """
        update blobs.doc_med set
            fk_type = %(new_type)s
        where
            fk_type = %(old_type)s
    """
    args = {'new_type': target_type['pk_doc_type'], 'old_type': original_type['pk_doc_type']}

    gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

    return True


class cDocumentType(gmBusinessDBObject.cBusinessDBObject):
    """Represents a document type."""

    _cmd_fetch_payload = """select * from blobs.v_doc_type where pk_doc_type=%s"""
    _cmds_store_payload = [
        """update blobs.doc_type set
            name = %(type)s
        where
            pk=%(pk_obj)s and
            xmin=%(xmin_doc_type)s""",
        """select xmin_doc_type from blobs.v_doc_type where pk_doc_type = %(pk_obj)s"""
    ]
    _updatable_fields = ['type']

    def set_translation(self, translation=None):
        if translation.strip() == '':
            return False

        if translation.strip() == self._payload[self._idx['l10n_type']].strip():
            return True

        rows, idx = gmPG2.run_rw_queries (
            queries = [
                {'cmd': 'select i18n.i18n(%s)', 'args': [self._payload[self._idx['type']]]},
                {'cmd': 'select i18n.upd_tx((select i18n.get_curr_lang()), %(orig)s, %(tx)s)',
                 'args': {
                    'orig': self._payload[self._idx['type']],
                    'tx': translation
                 }
                }
            ],
            return_data = True
        )
        if not rows[0][0]:
            _log.error('cannot set translation to [%s]' % translation)
            return False

        return self.refetch_payload()


def get_document_types():
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': "SELECT * FROM blobs.v_doc_type"}],
        get_col_idx = True
    )
    doc_types = []
    for row in rows:
        row_def = {'pk_field': 'pk_doc_type', 'idx': idx, 'data': row}
        doc_types.append(cDocumentType(row = row_def))
    return doc_types


def get_document_type_pk(document_type=None):
    args = {'typ': document_type.strip()}

    cmd = 'SELECT pk FROM blobs.doc_type WHERE name = %(typ)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    if len(rows) == 0:
        cmd = 'SELECT pk FROM blobs.doc_type WHERE _(name) = %(typ)s'
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if len(rows) == 0:
        return None

    return rows[0]['pk']


def map_types2pk(document_types=None):
    args = {'types': tuple(document_types)}
    cmd = 'SELECT pk_doc_type, coalesce(l10n_type, type) as desc FROM blobs.v_doc_type WHERE l10n_type IN %(types)s OR type IN %(types)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    return rows


def create_document_type(document_type=None):
    cmd = 'SELECT pk FROM blobs.doc_type WHERE name = %s'
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': cmd, 'args': [document_type]}]
    )
    if len(rows) == 0:
        _log.debug('creating document type [%s]', document_type)
        cmd1 = "INSERT INTO blobs.doc_type (name) VALUES (%s) RETURNING pk"
        rows, idx = gmPG2.run_rw_queries (
            queries = [{'cmd': cmd1, 'args': [document_type]}],
            return_data = True
        )
    return cDocumentType(aPK_obj = rows[0][0])


def delete_document_type(document_type=None):
    if document_type['is_in_use']:
        return False

    gmPG2.run_rw_queries (
        queries = [{
            'cmd': 'delete from blobs.doc_type where pk=%s',
            'args': [document_type['pk_doc_type']]
        }]
    )
    return True


def get_ext_ref():
    """This needs *considerably* more smarts."""
    dirname = gmTools.get_unique_filename (
        prefix = '',
        suffix = time.strftime(".%Y%m%d-%H%M%S", time.localtime())
    )
    path, doc_ID = os.path.split(dirname)
    return doc_ID


def test_check_mimetypes_in_archive():
    mimetypes = {}
    cmd = 'SELECT pk FROM blobs.doc_med'
    doc_pks, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd}])
    print('Detecting mimetypes in document archive ...')
    doc_idx = 0
    part_count = 0
    for pk_row in doc_pks:
        doc_idx += 1
        print('\n#%s - document %s of %s: ' % (pk_row['pk'], doc_idx, len(doc_pks)), end = '')
        doc = cDocument(aPK_obj = pk_row['pk'])
        for part in doc.parts:
            part_count += 1
            print('#%s:%s bytes, ' % (part['pk_obj'], part['size']), end = '')
            part_fname = part.save_to_file()
            mimetype = gmMimeLib.guess_mimetype(part_fname)
            try:
                mimetypes[mimetype]['count'] += 1
            except KeyError:
                mimetypes[mimetype] = {
                    'count': 1,
                    'viewer': gmMimeLib.get_viewer_cmd(mimetype),
                    'editor': gmMimeLib.get_editor_cmd(mimetype),
                    'extension': gmMimeLib.guess_ext_by_mimetype(mimetype)
                }
    print('')
    print('')
    print('Number of documents :', len(doc_pks))
    print('Number of parts :', part_count)
    print('Number of mime types:', len(mimetypes))
    for mimetype in mimetypes:
        print('')
        print('<%s>' % mimetype)
        print(' Extension:', mimetypes[mimetype]['extension'])
        print(' Use count:', mimetypes[mimetype]['count'])
        print(' Viewer:', mimetypes[mimetype]['viewer'])
        print(' Editor:', mimetypes[mimetype]['editor'])
    return 0



if __name__ == '__main__':

    if len(sys.argv) < 2:
        sys.exit()

    if sys.argv[1] != 'test':
        sys.exit()


    def test_doc_types():
        print("----------------------")
        print("listing document types")
        print("----------------------")

        for dt in get_document_types():
            print(dt)

        print("------------------------------")
        print("testing document type handling")
        print("------------------------------")

        dt = create_document_type(document_type = 'dummy doc type for unit test 1')
        print("created:", dt)

        dt['type'] = 'dummy doc type for unit test 2'
        dt.save_payload()
        print("changed base name:", dt)

        dt.set_translation(translation = 'Dummy-Dokumenten-Typ fuer Unit-Test')
        print("translated:", dt)

        print("deleted:", delete_document_type(document_type = dt))

        return

    def test_adding_doc_part():
        print("-----------------------")
        print("testing document import")
        print("-----------------------")

        docs = search_for_documents(patient_id = 12)
        doc = docs[0]
        print("adding to doc:", doc)

        fname = sys.argv[1]
        print("adding from file:", fname)
        part = doc.add_part(file = fname)
        print("new part:", part)

        return

    def test_get_documents():
        doc_folder = cDocumentFolder(aPKey = 12)

        docs = doc_folder.get_documents()
        for doc in docs:
            print('--------------------------')
            print(doc.format(single_line = True))
            print(doc.format())


    def test_part_metainfo():
        # reconstructed test wrapper (original def and the desc_printer
        # callback were elided from this listing)
        pk = 12
        from Gnumed.business.gmPerson import cPatient
        pat = cPatient(pk)
        doc_folder = cDocumentFolder(aPKey = pk)
        for doc in doc_folder.documents:
            for part in doc.parts:
                part.format_metainfo(callback = desc_printer)
                input('waiting ...')


    from Gnumed.pycommon import gmI18N
    gmI18N.activate_locale()
    gmI18N.install_domain()

    test_check_mimetypes_in_archive()