
Source Code for Module Gnumed.business.gmDocuments

   1  """This module encapsulates a document stored in a GNUmed database.""" 
   2  #============================================================ 
   3  __author__ = "Karsten Hilbert <Karsten.Hilbert@gmx.net>" 
   4  __license__ = "GPL v2 or later" 
   5   
   6  import sys, os, shutil, os.path, types, time, logging 
   7   
   8   
   9  if __name__ == '__main__': 
  10          sys.path.insert(0, '../../') 
  11  from Gnumed.pycommon import gmExceptions 
  12  from Gnumed.pycommon import gmBusinessDBObject 
  13  from Gnumed.pycommon import gmPG2 
  14  from Gnumed.pycommon import gmTools 
  15  from Gnumed.pycommon import gmMimeLib 
  16  from Gnumed.pycommon import gmDateTime 
  17   
  18  from Gnumed.business import gmOrganization 
  19   
  20   
  21  _log = logging.getLogger('gm.docs') 
  22   
  23  MUGSHOT=26 
  24  DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE = 'visual progress note' 
  25  DOCUMENT_TYPE_PRESCRIPTION = 'prescription' 
  26   
  27  #============================================================ 
class cDocumentFolder:
    """Represents a folder with medical documents for a single patient."""

    def __init__(self, aPKey=None):
        """Fails if

        - patient referenced by aPKey does not exist
        """
        self.pk_patient = aPKey            # == identity.pk == primary key
        if not self._pkey_exists():
            raise gmExceptions.ConstructorError("No patient with PK [%s] in database." % aPKey)

        # register backend notification interests
        # (keep this last so we won't hang on threads when
        # failing this constructor for other reasons ...)
        # if not self._register_interests():
        #     raise gmExceptions.ConstructorError, "cannot register signal interests"

        _log.debug('instantiated document folder for patient [%s]' % self.pk_patient)
    #--------------------------------------------------------
    def cleanup(self):
        pass

    #--------------------------------------------------------
    # internal helper
    #--------------------------------------------------------
    def _pkey_exists(self):
        """Does this primary key exist ?

        - true/false/None
        """
        # patient in demographic database ?
        rows, idx = gmPG2.run_ro_queries(queries = [
            {'cmd': "select exists(select pk from dem.identity where pk = %s)", 'args': [self.pk_patient]}
        ])
        if not rows[0][0]:
            _log.error("patient [%s] not in demographic database" % self.pk_patient)
            return None
        return True
    #--------------------------------------------------------
    # API
    #--------------------------------------------------------
    def get_latest_freediams_prescription(self):
        cmd = """
            SELECT pk_doc
            FROM blobs.v_doc_med
            WHERE
                pk_patient = %(pat)s
                    AND
                type = %(typ)s
                    AND
                ext_ref = %(ref)s
            ORDER BY
                clin_when DESC
            LIMIT 1
        """
        args = {
            'pat': self.pk_patient,
            'typ': DOCUMENT_TYPE_PRESCRIPTION,
            'ref': 'FreeDiams'
        }
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        if len(rows) == 0:
            _log.info('no FreeDiams prescription available for patient [%s]' % self.pk_patient)
            return None
        prescription = cDocument(aPK_obj = rows[0][0])
        return prescription

    #--------------------------------------------------------
    def get_latest_mugshot(self):
        cmd = "SELECT pk_obj FROM blobs.v_latest_mugshot WHERE pk_patient = %s"
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
        if len(rows) == 0:
            _log.info('no mugshots available for patient [%s]' % self.pk_patient)
            return None
        return cDocumentPart(aPK_obj = rows[0][0])

    latest_mugshot = property(get_latest_mugshot, lambda x:x)

    #--------------------------------------------------------
    def get_mugshot_list(self, latest_only=True):
        if latest_only:
            cmd = "select pk_doc, pk_obj from blobs.v_latest_mugshot where pk_patient = %s"
        else:
            cmd = """
                select
                    vdm.pk_doc as pk_doc,
                    dobj.pk as pk_obj
                from
                    blobs.v_doc_med vdm,
                    blobs.doc_obj dobj
                where
                    vdm.pk_type = (select pk from blobs.doc_type where name = 'patient photograph')
                    and vdm.pk_patient = %s
                    and dobj.fk_doc = vdm.pk_doc
            """
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
        return rows

    #--------------------------------------------------------
    def get_doc_list(self, doc_type=None):
        """return flat list of document IDs"""

        args = {
            'ID': self.pk_patient,
            'TYP': doc_type
        }

        cmd = """
            select vdm.pk_doc
            from blobs.v_doc_med vdm
            where
                vdm.pk_patient = %%(ID)s
                %s
            order by vdm.clin_when"""

        if doc_type is None:
            cmd = cmd % ''
        else:
            try:
                int(doc_type)
                cmd = cmd % 'and vdm.pk_type = %(TYP)s'
            except (TypeError, ValueError):
                cmd = cmd % 'and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)'

        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        doc_ids = []
        for row in rows:
            doc_ids.append(row[0])
        return doc_ids

    #--------------------------------------------------------
    def get_visual_progress_notes(self, episodes=None, encounter=None):
        return self.get_documents (
            doc_type = DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE,
            pk_episodes = episodes,
            encounter = encounter
        )

    #--------------------------------------------------------
    def get_unsigned_documents(self):
        args = {'pat': self.pk_patient}
        cmd = _sql_fetch_document_fields % """
            pk_doc IN (
                SELECT DISTINCT ON (b_vo.pk_doc) b_vo.pk_doc
                FROM blobs.v_obj4doc_no_data b_vo
                WHERE
                    pk_patient = %(pat)s
                        AND
                    reviewed IS FALSE
            )
            ORDER BY clin_when DESC"""
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
        return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]

    #--------------------------------------------------------
    def get_documents(self, doc_type=None, pk_episodes=None, encounter=None, order_by=None, exclude_unsigned=False, pk_types=None):
        """Return list of documents."""

        args = {
            'pat': self.pk_patient,
            'type': doc_type,
            'enc': encounter
        }
        where_parts = ['pk_patient = %(pat)s']

        if doc_type is not None:
            try:
                int(doc_type)
                where_parts.append('pk_type = %(type)s')
            except (TypeError, ValueError):
                where_parts.append('pk_type = (SELECT pk FROM blobs.doc_type WHERE name = %(type)s)')

        if pk_types is not None:
            where_parts.append('pk_type IN %(pk_types)s')
            args['pk_types'] = tuple(pk_types)

        if (pk_episodes is not None) and (len(pk_episodes) > 0):
            where_parts.append('pk_episode IN %(epis)s')
            args['epis'] = tuple(pk_episodes)

        if encounter is not None:
            where_parts.append('pk_encounter = %(enc)s')

        if exclude_unsigned:
            where_parts.append('pk_doc IN (SELECT b_vo.pk_doc FROM blobs.v_obj4doc_no_data b_vo WHERE b_vo.pk_patient = %(pat)s AND b_vo.reviewed IS TRUE)')

        if order_by is None:
            order_by = 'ORDER BY clin_when'

        cmd = "%s\n%s" % (_sql_fetch_document_fields % ' AND '.join(where_parts), order_by)
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)

        return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]

    documents = property(get_documents, lambda x:x)

    #--------------------------------------------------------
    def add_document(self, document_type=None, encounter=None, episode=None, link_obj=None):
        return create_document(link_obj = link_obj, document_type = document_type, encounter = encounter, episode = episode)

    #--------------------------------------------------------
    def add_prescription(self, encounter=None, episode=None, link_obj=None):
        return self.add_document (
            link_obj = link_obj,
            document_type = create_document_type (
                document_type = DOCUMENT_TYPE_PRESCRIPTION
            )['pk_doc_type'],
            encounter = encounter,
            episode = episode
        )

    #--------------------------------------------------------
    def _get_all_document_org_units(self):
        cmd = gmOrganization._SQL_get_org_unit % (
            'pk_org_unit IN (SELECT DISTINCT ON (pk_org_unit) pk_org_unit FROM blobs.v_doc_med WHERE pk_patient = %(pat)s)'
        )
        args = {'pat': self.pk_patient}
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
        return [ gmOrganization.cOrgUnit(row = {'data': r, 'idx': idx, 'pk_field': 'pk_org_unit'}) for r in rows ]

    all_document_org_units = property(_get_all_document_org_units, lambda x:x)

#============================================================
_sql_fetch_document_part_fields = "select * from blobs.v_obj4doc_no_data where %s"

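#------------------------------------------------------------
# Usage sketch (illustrative only): typical client-side use of
# cDocumentFolder. It assumes a configured backend connection and that
# pk_patient refers to an existing row in dem.identity.
def _example_list_documents_of_patient(pk_patient):
    folder = cDocumentFolder(aPKey = pk_patient)
    for doc in folder.get_documents():
        print(doc.format(single_line = True))
    return folder.get_unsigned_documents()
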
class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
    """Represents one part of a medical document."""

    _cmd_fetch_payload = _sql_fetch_document_part_fields % "pk_obj = %s"
    _cmds_store_payload = [
        """UPDATE blobs.doc_obj SET
            seq_idx = %(seq_idx)s,
            comment = gm.nullify_empty_string(%(obj_comment)s),
            filename = gm.nullify_empty_string(%(filename)s),
            fk_intended_reviewer = %(pk_intended_reviewer)s
        WHERE
            pk = %(pk_obj)s
                AND
            xmin = %(xmin_doc_obj)s
        RETURNING
            xmin AS xmin_doc_obj"""
    ]
    _updatable_fields = [
        'seq_idx',
        'obj_comment',
        'pk_intended_reviewer',
        'filename'
    ]
    #--------------------------------------------------------
    # retrieve data
    #--------------------------------------------------------
    def save_to_file(self, aChunkSize=0, filename=None, target_mime=None, target_extension=None, ignore_conversion_problems=False, directory=None, adjust_extension=False, conn=None):

        if self._payload[self._idx['size']] == 0:
            return None

        if filename is None:
            filename = self.get_useful_filename(make_unique = True, directory = directory)

        success = gmPG2.bytea2file (
            data_query = {
                'cmd': 'SELECT substring(data from %(start)s for %(size)s) FROM blobs.doc_obj WHERE pk=%(pk)s',
                'args': {'pk': self.pk_obj}
            },
            filename = filename,
            chunk_size = aChunkSize,
            data_size = self._payload[self._idx['size']],
            conn = conn
        )
        if not success:
            return None

        if target_mime is None:
            if filename.endswith('.dat'):
                if adjust_extension:
                    return gmMimeLib.adjust_extension_by_mimetype(filename)
            return filename

        if target_extension is None:
            target_extension = gmMimeLib.guess_ext_by_mimetype(mimetype = target_mime)

        target_path, name = os.path.split(filename)
        name, tmp = os.path.splitext(name)
        target_fname = gmTools.get_unique_filename (
            prefix = '%s-conv-' % name,
            suffix = target_extension
        )
        _log.debug('attempting conversion: [%s] -> [<%s>:%s]', filename, target_mime, target_fname)
        if gmMimeLib.convert_file (
            filename = filename,
            target_mime = target_mime,
            target_filename = target_fname
        ):
            return target_fname

        _log.warning('conversion failed')
        if not ignore_conversion_problems:
            return None

        if filename.endswith('.dat'):
            if adjust_extension:
                filename = gmMimeLib.adjust_extension_by_mimetype(filename)
        _log.warning('programmed to ignore conversion problems, hoping receiver can handle [%s]', filename)
        return filename

    #--------------------------------------------------------
    def get_reviews(self):
        cmd = """
            SELECT
                reviewer,
                reviewed_when,
                is_technically_abnormal,
                clinically_relevant,
                is_review_by_responsible_reviewer,
                is_your_review,
                coalesce(comment, '')
            FROM blobs.v_reviewed_doc_objects
            WHERE pk_doc_obj = %s
            ORDER BY
                is_your_review desc,
                is_review_by_responsible_reviewer desc,
                reviewed_when desc
        """
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
        return rows

    #--------------------------------------------------------
    def get_containing_document(self):
        return cDocument(aPK_obj = self._payload[self._idx['pk_doc']])

    #--------------------------------------------------------
    # store data
    #--------------------------------------------------------
    def update_data_from_file(self, fname=None, link_obj=None):
        # sanity check
        if not (os.access(fname, os.R_OK) and os.path.isfile(fname)):
            _log.error('[%s] is not a readable file' % fname)
            return False

        if not gmPG2.file2bytea (
            conn = link_obj,
            query = "UPDATE blobs.doc_obj SET data = %(data)s::bytea WHERE pk = %(pk)s RETURNING md5(data) AS md5",
            filename = fname,
            args = {'pk': self.pk_obj},
            file_md5 = gmTools.file2md5(filename = fname, return_hex = True)
        ):
            return False

        # must update XMIN now ...
        self.refetch_payload(link_obj = link_obj)
        return True

    #--------------------------------------------------------
    def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
        # row already there ?
        cmd = """
            select pk
            from blobs.reviewed_doc_objs
            where
                fk_reviewed_row = %s and
                fk_reviewer = (select pk from dem.staff where db_user = current_user)"""
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])

        # INSERT needed
        if len(rows) == 0:
            cols = [
                "fk_reviewer",
                "fk_reviewed_row",
                "is_technically_abnormal",
                "clinically_relevant"
            ]
            vals = [
                '%(fk_row)s',
                '%(abnormal)s',
                '%(relevant)s'
            ]
            args = {
                'fk_row': self.pk_obj,
                'abnormal': technically_abnormal,
                'relevant': clinically_relevant
            }
            cmd = """
                insert into blobs.reviewed_doc_objs (
                    %s
                ) values (
                    (select pk from dem.staff where db_user=current_user),
                    %s
                )""" % (', '.join(cols), ', '.join(vals))

        # UPDATE needed
        if len(rows) == 1:
            pk_review = rows[0][0]
            args = {
                'abnormal': technically_abnormal,
                'relevant': clinically_relevant,
                'pk_review': pk_review
            }
            cmd = """
                UPDATE blobs.reviewed_doc_objs SET
                    is_technically_abnormal = %(abnormal)s,
                    clinically_relevant = %(relevant)s
                WHERE
                    pk = %(pk_review)s
            """
        rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

        return True

    #--------------------------------------------------------
    def set_as_active_photograph(self):
        if self._payload[self._idx['type']] != 'patient photograph':
            return False
        # set seq_idx to current max + 1
        cmd = 'SELECT coalesce(max(seq_idx)+1, 1) FROM blobs.doc_obj WHERE fk_doc = %(doc_id)s'
        rows, idx = gmPG2.run_ro_queries (
            queries = [{
                'cmd': cmd,
                'args': {'doc_id': self._payload[self._idx['pk_doc']]}
            }]
        )
        self._payload[self._idx['seq_idx']] = rows[0][0]
        self._is_modified = True
        self.save_payload()

    #--------------------------------------------------------
    def reattach(self, pk_doc=None):
        if pk_doc == self._payload[self._idx['pk_doc']]:
            return True

        cmd = """
            UPDATE blobs.doc_obj SET
                fk_doc = %(pk_doc_target)s,
                -- coalesce needed for no-parts target docs
                seq_idx = (SELECT coalesce(max(seq_idx) + 1, 1) FROM blobs.doc_obj WHERE fk_doc = %(pk_doc_target)s)
            WHERE
                EXISTS(SELECT 1 FROM blobs.doc_med WHERE pk = %(pk_doc_target)s)
                    AND
                pk = %(pk_obj)s
                    AND
                xmin = %(xmin_doc_obj)s
            RETURNING fk_doc
        """
        args = {
            'pk_doc_target': pk_doc,
            'pk_obj': self.pk_obj,
            'xmin_doc_obj': self._payload[self._idx['xmin_doc_obj']]
        }
        rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True, get_col_idx = False)
        if len(rows) == 0:
            return False
        # The following should never hold true because the target
        # fk_doc is returned from the query and it is checked for
        # equality before the UPDATE already. Assuming the update
        # failed to update a row because the target fk_doc did
        # not exist we would not get *any* rows in return - for
        # which condition we also already checked
        if rows[0]['fk_doc'] == self._payload[self._idx['pk_doc']]:
            return False

        self.refetch_payload()
        return True
    #--------------------------------------------------------
    def display_via_mime(self, chunksize=0, block=None):

        fname = self.save_to_file(aChunkSize = chunksize)
        if fname is None:
            return False, ''

        success, msg = gmMimeLib.call_viewer_on_file(fname, block = block)
        if not success:
            return False, msg

        return True, ''

    #--------------------------------------------------------
    def format_single_line(self):
        f_ext = ''
        if self._payload[self._idx['filename']] is not None:
            f_ext = os.path.splitext(self._payload[self._idx['filename']])[1].strip('.').strip()
        if f_ext != '':
            f_ext = ' .' + f_ext.upper()
        txt = _('part %s, %s%s%s of document %s from %s%s') % (
            self._payload[self._idx['seq_idx']],
            gmTools.size2str(self._payload[self._idx['size']]),
            f_ext,
            gmTools.coalesce(self._payload[self._idx['obj_comment']], '', ' ("%s")'),
            self._payload[self._idx['l10n_type']],
            gmDateTime.pydt_strftime(self._payload[self._idx['date_generated']], '%Y %b %d'),
            gmTools.coalesce(self._payload[self._idx['doc_comment']], '', ' ("%s")')
        )
        return txt

    #--------------------------------------------------------
    def format(self, single_line=False):
        if single_line:
            return self.format_single_line()

        txt = _('%s document part [#%s]\n') % (
            gmTools.bool2str (
                boolean = self._payload[self._idx['reviewed']],
                true_str = _('Reviewed'),
                false_str = _('Unreviewed')
            ),
            self._payload[self._idx['pk_obj']]
        )

        f_ext = ''
        if self._payload[self._idx['filename']] is not None:
            f_ext = os.path.splitext(self._payload[self._idx['filename']])[1].strip('.').strip()
        if f_ext != '':
            f_ext = '.' + f_ext.upper() + ' '
        txt += _(' Part %s: %s %s(%s Bytes)\n') % (
            self._payload[self._idx['seq_idx']],
            gmTools.size2str(self._payload[self._idx['size']]),
            f_ext,
            self._payload[self._idx['size']]
        )

        if self._payload[self._idx['filename']] is not None:
            path, fname = os.path.split(self._payload[self._idx['filename']])
            if not path.endswith(os.path.sep):
                if path != '':
                    path += os.path.sep
            if path != '':
                path = ' (%s)' % path
            txt += _(' Filename: %s%s\n') % (fname, path)

        if self._payload[self._idx['obj_comment']] is not None:
            txt += '\n%s\n' % self._payload[self._idx['obj_comment']]

        return txt

    #--------------------------------------------------------
    def get_useful_filename(self, patient=None, make_unique=False, directory=None, include_gnumed_tag=True, date_before_type=False, name_first=True):
        patient_part = ''
        if patient is not None:
            if name_first:
                patient_part = '%s-' % patient.subdir_name
            else:
                patient_part = '-%s' % patient.subdir_name

        # preserve original filename extension if available
        suffix = '.dat'
        if self._payload[self._idx['filename']] is not None:
            tmp, suffix = os.path.splitext (
                gmTools.fname_sanitize(self._payload[self._idx['filename']]).lower()
            )
            if suffix == '':
                suffix = '.dat'

        if include_gnumed_tag:
            fname_template = 'gm_doc-part_%s-%%s' % self._payload[self._idx['seq_idx']]
        else:
            fname_template = '%%s-part_%s' % self._payload[self._idx['seq_idx']]

        if date_before_type:
            date_type_part = '%s-%s' % (
                gmDateTime.pydt_strftime(self._payload[self._idx['date_generated']], '%Y-%m-%d', 'utf-8', gmDateTime.acc_days),
                self._payload[self._idx['l10n_type']].replace(' ', '_').replace('-', '_'),
            )
        else:
            date_type_part = '%s-%s' % (
                self._payload[self._idx['l10n_type']].replace(' ', '_').replace('-', '_'),
                gmDateTime.pydt_strftime(self._payload[self._idx['date_generated']], '%Y-%m-%d', 'utf-8', gmDateTime.acc_days)
            )

        if name_first:
            date_type_name_part = patient_part + date_type_part
        else:
            date_type_name_part = date_type_part + patient_part

        fname = fname_template % date_type_name_part

        if make_unique:
            fname = gmTools.get_unique_filename (
                prefix = '%s-' % gmTools.fname_sanitize(fname),
                suffix = suffix,
                tmp_dir = directory
            )
        else:
            fname = gmTools.fname_sanitize(os.path.join(gmTools.coalesce(directory, ''), fname + suffix))

        return fname

#------------------------------------------------------------
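# Usage sketch (illustrative only): exporting one document part to disk
# and flagging it as reviewed. Assumes a configured backend connection
# and that pk_part refers to an existing row in blobs.doc_obj.
def _example_export_and_review_part(pk_part):
    part = cDocumentPart(aPK_obj = pk_part)
    fname = part.save_to_file(target_mime = 'application/pdf', ignore_conversion_problems = True)
    if fname is not None:
        part.set_reviewed(technically_abnormal = False, clinically_relevant = True)
    return fname

#------------------------------------------------------------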
def delete_document_part(part_pk=None, encounter_pk=None):
    cmd = """
        SELECT blobs.delete_document_part(%(pk)s, %(enc)s)
        WHERE NOT EXISTS
            (SELECT 1 FROM clin.export_item where fk_doc_obj = %(pk)s)
    """
    args = {'pk': part_pk, 'enc': encounter_pk}
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
    return

#============================================================
_sql_fetch_document_fields = "SELECT * FROM blobs.v_doc_med b_vdm WHERE %s"

class cDocument(gmBusinessDBObject.cBusinessDBObject):
    """Represents one medical document."""

    _cmd_fetch_payload = _sql_fetch_document_fields % "pk_doc = %s"
    _cmds_store_payload = [
        """UPDATE blobs.doc_med SET
            fk_type = %(pk_type)s,
            fk_episode = %(pk_episode)s,
            fk_encounter = %(pk_encounter)s,
            fk_org_unit = %(pk_org_unit)s,
            unit_is_receiver = %(unit_is_receiver)s,
            clin_when = %(clin_when)s,
            comment = gm.nullify_empty_string(%(comment)s),
            ext_ref = gm.nullify_empty_string(%(ext_ref)s),
            fk_hospital_stay = %(pk_hospital_stay)s
        WHERE
            pk = %(pk_doc)s and
            xmin = %(xmin_doc_med)s
        RETURNING
            xmin AS xmin_doc_med"""
    ]
    _updatable_fields = [
        'pk_type',
        'comment',
        'clin_when',
        'ext_ref',
        'pk_episode',
        'pk_encounter',        # mainly useful when moving visual progress notes to their respective encounters
        'pk_org_unit',
        'unit_is_receiver',
        'pk_hospital_stay'
    ]

    #--------------------------------------------------------
    def refetch_payload(self, ignore_changes=False, link_obj=None):
        try:
            del self.__has_unreviewed_parts
        except AttributeError:
            pass

        return super(cDocument, self).refetch_payload(ignore_changes = ignore_changes, link_obj = link_obj)

    #--------------------------------------------------------
    def get_descriptions(self, max_lng=250):
        """Get document descriptions.

        - will return a list of rows
        """
        if max_lng is None:
            cmd = "SELECT pk, text FROM blobs.doc_desc WHERE fk_doc = %s"
        else:
            cmd = "SELECT pk, substring(text from 1 for %s) FROM blobs.doc_desc WHERE fk_doc = %%s" % max_lng
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
        return rows

    #--------------------------------------------------------
    def add_description(self, description=None):
        cmd = "insert into blobs.doc_desc (fk_doc, text) values (%s, %s)"
        gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj, description]}])
        return True

    #--------------------------------------------------------
    def update_description(self, pk=None, description=None):
        cmd = "update blobs.doc_desc set text = %(desc)s where fk_doc = %(doc)s and pk = %(pk_desc)s"
        gmPG2.run_rw_queries(queries = [
            {'cmd': cmd, 'args': {'doc': self.pk_obj, 'pk_desc': pk, 'desc': description}}
        ])
        return True

    #--------------------------------------------------------
    def delete_description(self, pk=None):
        cmd = "delete from blobs.doc_desc where fk_doc = %(doc)s and pk = %(desc)s"
        gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': {'doc': self.pk_obj, 'desc': pk}}])
        return True

    #--------------------------------------------------------
    def _get_parts(self):
        cmd = _sql_fetch_document_part_fields % "pk_doc = %s ORDER BY seq_idx"
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}], get_col_idx = True)
        return [ cDocumentPart(row = {'pk_field': 'pk_obj', 'idx': idx, 'data': r}) for r in rows ]

    parts = property(_get_parts, lambda x:x)

    #--------------------------------------------------------
    def add_part(self, file=None, link_obj=None):
        """Add a part to the document."""
        # create dummy part
        cmd = """
            INSERT INTO blobs.doc_obj (
                fk_doc, data, seq_idx
            ) VALUES (
                %(doc_id)s,
                ''::bytea,
                (SELECT coalesce(max(seq_idx)+1, 1) FROM blobs.doc_obj WHERE fk_doc = %(doc_id)s)
            ) RETURNING pk"""
        rows, idx = gmPG2.run_rw_queries (
            link_obj = link_obj,
            queries = [{'cmd': cmd, 'args': {'doc_id': self.pk_obj}}],
            return_data = True
        )
        # init document part instance
        pk_part = rows[0][0]
        new_part = cDocumentPart(aPK_obj = pk_part, link_obj = link_obj)
        if not new_part.update_data_from_file(link_obj = link_obj, fname = file):
            _log.error('cannot import binary data from [%s] into document part' % file)
            gmPG2.run_rw_queries (
                link_obj = link_obj,
                queries = [{'cmd': "DELETE FROM blobs.doc_obj WHERE pk = %s", 'args': [pk_part]}]
            )
            return None
        new_part['filename'] = file
        new_part.save_payload(conn = link_obj)

        return new_part

    #--------------------------------------------------------
    def add_parts_from_files(self, files=None, reviewer=None):

        new_parts = []

        for filename in files:
            new_part = self.add_part(file = filename)
            if new_part is None:
                msg = 'cannot instantiate document part object from [%s]' % filename
                _log.error(msg)
                return (False, msg, filename)
            new_parts.append(new_part)

            if reviewer is not None:
                new_part['pk_intended_reviewer'] = reviewer        # None == Null
                success, data = new_part.save_payload()
                if not success:
                    msg = 'cannot set reviewer to [%s] on [%s]' % (reviewer, filename)
                    _log.error(msg)
                    _log.error(str(data))
                    return (False, msg, filename)

        return (True, '', new_parts)

    #--------------------------------------------------------
    def save_parts_to_files(self, export_dir=None, chunksize=0, conn=None):
        fnames = []
        for part in self.parts:
            fname = part.save_to_file(aChunkSize = chunksize, directory = export_dir, conn = conn)
            # if export_dir is not None:
            #     shutil.move(fname, export_dir)
            #     fname = os.path.join(export_dir, os.path.split(fname)[1])
            if fname is None:
                _log.error('cannot export document part [%s]', part)
                continue
            fnames.append(fname)
        return fnames

    #--------------------------------------------------------
    def _get_has_unreviewed_parts(self):
        try:
            return self.__has_unreviewed_parts
        except AttributeError:
            pass

        cmd = "SELECT EXISTS(SELECT 1 FROM blobs.v_obj4doc_no_data WHERE pk_doc = %(pk)s AND reviewed IS FALSE)"
        args = {'pk': self.pk_obj}
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
        self.__has_unreviewed_parts = rows[0][0]

        return self.__has_unreviewed_parts

    has_unreviewed_parts = property(_get_has_unreviewed_parts, lambda x:x)

    #--------------------------------------------------------
    def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
        # FIXME: this is probably inefficient
        for part in self.parts:
            if not part.set_reviewed(technically_abnormal, clinically_relevant):
                return False
        return True

    #--------------------------------------------------------
    def set_primary_reviewer(self, reviewer=None):
        for part in self.parts:
            part['pk_intended_reviewer'] = reviewer
            success, data = part.save_payload()
            if not success:
                _log.error('cannot set reviewer to [%s]' % reviewer)
                _log.error(str(data))
                return False
        return True

    #--------------------------------------------------------
    def format_single_line(self):

        part_count = len(self._payload[self._idx['seq_idx_list']])
        if part_count == 0:
            parts = _('no parts')
        elif part_count == 1:
            parts = _('1 part')
        else:
            parts = _('%s parts') % part_count

        detail = ''
        if self._payload[self._idx['ext_ref']] is not None:
            detail = self._payload[self._idx['ext_ref']]
        if self._payload[self._idx['unit']] is not None:
            template = _('%s of %s')
            if detail == '':
                detail = _('%s of %s') % (
                    self._payload[self._idx['unit']],
                    self._payload[self._idx['organization']]
                )
            else:
                detail += (' @ ' + template % (
                    self._payload[self._idx['unit']],
                    self._payload[self._idx['organization']]
                ))
        if detail != '':
            detail = ' (%s)' % detail

        return '%s %s (%s):%s%s' % (
            gmDateTime.pydt_strftime(self._payload[self._idx['clin_when']], '%Y %b %d', accuracy = gmDateTime.acc_days),
            self._payload[self._idx['l10n_type']],
            parts,
            gmTools.coalesce(self._payload[self._idx['comment']], '', ' "%s"'),
            detail
        )

    #--------------------------------------------------------
    def format(self, single_line=False):
        if single_line:
            return self.format_single_line()

        part_count = len(self._payload[self._idx['seq_idx_list']])
        if part_count == 0:
            parts = _('no parts')
        elif part_count == 1:
            parts = _('1 part')
        else:
            parts = _('%s parts') % part_count

        org = ''
        if self._payload[self._idx['unit']] is not None:
            if self._payload[self._idx['unit_is_receiver']]:
                org = _(' Receiver: %s @ %s\n') % (
                    self._payload[self._idx['unit']],
                    self._payload[self._idx['organization']]
                )
            else:
                org = _(' Sender: %s @ %s\n') % (
                    self._payload[self._idx['unit']],
                    self._payload[self._idx['organization']]
                )

        stay = ''
        if self._payload[self._idx['pk_hospital_stay']] is not None:
            stay = _('Hospital stay') + ': %s\n' % self.hospital_stay.format (
                left_margin = 0,
                include_procedures = False,
                include_docs = False,
                include_episode = False
            )

        txt = _(
            '%s (%s) #%s\n'
            ' Created: %s\n'
            ' Episode: %s\n'
            '%s'
            '%s'
            '%s'
            '%s'
            '%s'
        ) % (
            self._payload[self._idx['l10n_type']],
            parts,
            self._payload[self._idx['pk_doc']],
            gmDateTime.pydt_strftime(self._payload[self._idx['clin_when']], format = '%Y %b %d', accuracy = gmDateTime.acc_days),
            self._payload[self._idx['episode']],
            gmTools.coalesce(self._payload[self._idx['health_issue']], '', _(' Health issue: %s\n')),
            gmTools.coalesce(self._payload[self._idx['ext_ref']], '', _(' External reference: %s\n')),
            org,
            stay,
            gmTools.coalesce(self._payload[self._idx['comment']], '', ' %s')
        )

        return txt

    #--------------------------------------------------------
    def _get_hospital_stay(self):
        if self._payload[self._idx['pk_hospital_stay']] is None:
            return None
        from Gnumed.business import gmEMRStructItems
        return gmEMRStructItems.cHospitalStay(self._payload[self._idx['pk_hospital_stay']])

    hospital_stay = property(_get_hospital_stay, lambda x:x)

    #--------------------------------------------------------
    def _get_org_unit(self):
        if self._payload[self._idx['pk_org_unit']] is None:
            return None
        return gmOrganization.cOrgUnit(self._payload[self._idx['pk_org_unit']])

    org_unit = property(_get_org_unit, lambda x:x)

    #--------------------------------------------------------
    def _get_procedures(self):
        from Gnumed.business.gmEMRStructItems import get_procedures4document
        return get_procedures4document(pk_document = self.pk_obj)

    procedures = property(_get_procedures, lambda x:x)

    #--------------------------------------------------------
    def _get_bills(self):
        from Gnumed.business.gmBilling import get_bills4document
        return get_bills4document(pk_document = self.pk_obj)

    bills = property(_get_bills, lambda x:x)

#------------------------------------------------------------
def create_document(document_type=None, encounter=None, episode=None, link_obj=None):
    """Returns new document instance or raises an exception."""
    try:
        int(document_type)
        cmd = """INSERT INTO blobs.doc_med (fk_type, fk_encounter, fk_episode) VALUES (%(type)s, %(enc)s, %(epi)s) RETURNING pk"""
    except ValueError:
        create_document_type(document_type = document_type)
        cmd = """
            INSERT INTO blobs.doc_med (
                fk_type,
                fk_encounter,
                fk_episode
            ) VALUES (
                coalesce (
                    (SELECT pk from blobs.doc_type bdt where bdt.name = %(type)s),
                    (SELECT pk from blobs.doc_type bdt where _(bdt.name) = %(type)s)
                ),
                %(enc)s,
                %(epi)s
            ) RETURNING pk"""
    args = {'type': document_type, 'enc': encounter, 'epi': episode}
    rows, idx = gmPG2.run_rw_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], return_data = True)
    doc = cDocument(aPK_obj = rows[0][0], link_obj = link_obj)
    return doc

#------------------------------------------------------------
def search_for_documents(patient_id=None, type_id=None, external_reference=None, pk_episode=None, pk_types=None):
    """Searches for documents with the given patient and type ID."""

    if (patient_id is None) and (pk_episode is None):
        raise ValueError('need patient_id or pk_episode to search for document')

    where_parts = []
    args = {
        'pat_id': patient_id,
        'type_id': type_id,
        'ref': external_reference,
        'pk_epi': pk_episode
    }

    if patient_id is not None:
        where_parts.append('pk_patient = %(pat_id)s')

    if type_id is not None:
        where_parts.append('pk_type = %(type_id)s')

    if external_reference is not None:
        where_parts.append('ext_ref = %(ref)s')

    if pk_episode is not None:
        where_parts.append('pk_episode = %(pk_epi)s')

    if pk_types is not None:
        where_parts.append('pk_type IN %(pk_types)s')
        args['pk_types'] = tuple(pk_types)

    cmd = _sql_fetch_document_fields % ' AND '.join(where_parts)
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
    return [ cDocument(row = {'data': r, 'idx': idx, 'pk_field': 'pk_doc'}) for r in rows ]

#------------------------------------------------------------
def delete_document(document_id=None, encounter_id=None):
    # cascades to doc_obj and doc_desc but not bill.bill
    cmd = "SELECT blobs.delete_document(%(pk)s, %(enc)s)"
    args = {'pk': document_id, 'enc': encounter_id}
    rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True)
    if not rows[0][0]:
        _log.error('cannot delete document [%s]', document_id)
        return False
    return True

#------------------------------------------------------------
def reclassify_documents_by_type(original_type=None, target_type=None):

    _log.debug('reclassifying documents by type')
    _log.debug('original: %s', original_type)
    _log.debug('target: %s', target_type)

    if target_type['pk_doc_type'] == original_type['pk_doc_type']:
        return True

    cmd = """
        update blobs.doc_med set
            fk_type = %(new_type)s
        where
            fk_type = %(old_type)s
    """
    args = {'new_type': target_type['pk_doc_type'], 'old_type': original_type['pk_doc_type']}

    gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])

    return True

#============================================================
class cDocumentType(gmBusinessDBObject.cBusinessDBObject):
    """Represents a document type."""

    _cmd_fetch_payload = """select * from blobs.v_doc_type where pk_doc_type=%s"""
    _cmds_store_payload = [
        """update blobs.doc_type set
            name = %(type)s
        where
            pk=%(pk_obj)s and
            xmin=%(xmin_doc_type)s""",
        """select xmin_doc_type from blobs.v_doc_type where pk_doc_type = %(pk_obj)s"""
    ]
    _updatable_fields = ['type']
    #--------------------------------------------------------
    def set_translation(self, translation=None):

        if translation.strip() == '':
            return False

        if translation.strip() == self._payload[self._idx['l10n_type']].strip():
            return True

        rows, idx = gmPG2.run_rw_queries (
            queries = [
                {'cmd': 'select i18n.i18n(%s)', 'args': [self._payload[self._idx['type']]]},
                {'cmd': 'select i18n.upd_tx((select i18n.get_curr_lang()), %(orig)s, %(tx)s)',
                 'args': {
                    'orig': self._payload[self._idx['type']],
                    'tx': translation
                 }
                }
            ],
            return_data = True
        )
        if not rows[0][0]:
            _log.error('cannot set translation to [%s]' % translation)
            return False

        return self.refetch_payload()

#------------------------------------------------------------
def get_document_types():
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': "SELECT * FROM blobs.v_doc_type"}],
        get_col_idx = True
    )
    doc_types = []
    for row in rows:
        row_def = {'pk_field': 'pk_doc_type', 'idx': idx, 'data': row}
        doc_types.append(cDocumentType(row = row_def))
    return doc_types

#------------------------------------------------------------
def get_document_type_pk(document_type=None):
    args = {'typ': document_type.strip()}

    cmd = 'SELECT pk FROM blobs.doc_type WHERE name = %(typ)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    if len(rows) == 0:
        cmd = 'SELECT pk FROM blobs.doc_type WHERE _(name) = %(typ)s'
        rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)

    if len(rows) == 0:
        return None

    return rows[0]['pk']

#------------------------------------------------------------
def map_types2pk(document_types=None):
    args = {'types': tuple(document_types)}
    cmd = 'SELECT pk_doc_type, coalesce(l10n_type, type) as desc FROM blobs.v_doc_type WHERE l10n_type IN %(types)s OR type IN %(types)s'
    rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
    return rows

#------------------------------------------------------------
def create_document_type(document_type=None):
    # check for potential dupes:
    cmd = 'SELECT pk FROM blobs.doc_type WHERE name = %s'
    rows, idx = gmPG2.run_ro_queries (
        queries = [{'cmd': cmd, 'args': [document_type]}]
    )
    if len(rows) == 0:
        _log.debug('creating document type [%s]', document_type)
        cmd1 = "INSERT INTO blobs.doc_type (name) VALUES (%s) RETURNING pk"
        rows, idx = gmPG2.run_rw_queries (
            queries = [{'cmd': cmd1, 'args': [document_type]}],
            return_data = True
        )
    return cDocumentType(aPK_obj = rows[0][0])

#------------------------------------------------------------
def delete_document_type(document_type=None):
    if document_type['is_in_use']:
        return False
    gmPG2.run_rw_queries (
        queries = [{
            'cmd': 'delete from blobs.doc_type where pk=%s',
            'args': [document_type['pk_doc_type']]
        }]
    )
    return True

#------------------------------------------------------------
def get_ext_ref():
    """This needs *considerably* more smarts."""
    dirname = gmTools.get_unique_filename (
        prefix = '',
        suffix = time.strftime(".%Y%m%d-%H%M%S", time.localtime())
    )
    # extract name for dir
    path, doc_ID = os.path.split(dirname)
    return doc_ID

#============================================================
# main
#------------------------------------------------------------
if __name__ == '__main__':

    if len(sys.argv) < 2:
        sys.exit()

    if sys.argv[1] != 'test':
        sys.exit()

    #--------------------------------------------------------
    def test_doc_types():

        print("----------------------")
        print("listing document types")
        print("----------------------")

        for dt in get_document_types():
            print(dt)

        print("------------------------------")
        print("testing document type handling")
        print("------------------------------")

        dt = create_document_type(document_type = 'dummy doc type for unit test 1')
        print("created:", dt)

        dt['type'] = 'dummy doc type for unit test 2'
        dt.save_payload()
        print("changed base name:", dt)

        dt.set_translation(translation = 'Dummy-Dokumenten-Typ fuer Unit-Test')
        print("translated:", dt)

        print("deleted:", delete_document_type(document_type = dt))

        return
    #--------------------------------------------------------
    def test_adding_doc_part():

        print("-----------------------")
        print("testing document import")
        print("-----------------------")

        docs = search_for_documents(patient_id=12)
        doc = docs[0]
        print("adding to doc:", doc)

        fname = sys.argv[1]
        print("adding from file:", fname)
        part = doc.add_part(file=fname)
        print("new part:", part)

        return
    #--------------------------------------------------------
    def test_get_documents():

        doc_folder = cDocumentFolder(aPKey=12)

        #photo = doc_folder.get_latest_mugshot()
        #print type(photo), photo

        docs = doc_folder.get_documents()
        for doc in docs:
            #print type(doc), doc
            #print doc.parts
            #print doc.format_single_line()
            print('--------------------------')
            print(doc.format(single_line = True))
            print(doc.format())
        #pprint(gmBusinessDBObject.jsonclasshintify(docs))
    #--------------------------------------------------------
    def test_get_useful_filename():
        pk = 12
        from Gnumed.business.gmPerson import cPatient
        pat = cPatient(pk)
        doc_folder = cDocumentFolder(aPKey = pk)
        for doc in doc_folder.documents:
            for part in doc.parts:
                print(part.get_useful_filename (
                    patient = pat,
                    make_unique = True,
                    directory = None,
                    include_gnumed_tag = False,
                    date_before_type = True,
                    name_first = False
                ))

    #--------------------------------------------------------
    from Gnumed.pycommon import gmI18N
    gmI18N.activate_locale()
    gmI18N.install_domain()

    #test_doc_types()
    #test_adding_doc_part()
    test_get_documents()
    #test_get_useful_filename()

    # print get_ext_ref()