
Source Code for Module Gnumed.business.gmDocuments

  1  """This module encapsulates a document stored in a GNUmed database.""" 
  2  #============================================================ 
  3  __author__ = "Karsten Hilbert <Karsten.Hilbert@gmx.net>" 
  4  __license__ = "GPL v2 or later" 
  5   
  6  import sys, os, shutil, os.path, types, time, logging 
  7   
  8   
  9  if __name__ == '__main__': 
 10          sys.path.insert(0, '../../') 
 11  from Gnumed.pycommon import gmExceptions 
 12  from Gnumed.pycommon import gmBusinessDBObject 
 13  from Gnumed.pycommon import gmPG2 
 14  from Gnumed.pycommon import gmTools 
 15  from Gnumed.pycommon import gmMimeLib 
 16  from Gnumed.pycommon import gmDateTime 
 17   
 18   
 19  _log = logging.getLogger('gm.docs') 
 20   
 21  MUGSHOT=26 
 22  DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE = u'visual progress note' 
 23  DOCUMENT_TYPE_PRESCRIPTION = u'prescription' 
 24  #============================================================ 
25 -class cDocumentFolder:
26 """Represents a folder with medical documents for a single patient.""" 27
28 - def __init__(self, aPKey = None):
29 """Fails if 30 31 - patient referenced by aPKey does not exist 32 """ 33 self.pk_patient = aPKey # == identity.pk == primary key 34 if not self._pkey_exists(): 35 raise gmExceptions.ConstructorError, "No patient with PK [%s] in database." % aPKey 36 37 # register backend notification interests 38 # (keep this last so we won't hang on threads when 39 # failing this constructor for other reasons ...) 40 # if not self._register_interests(): 41 # raise gmExceptions.ConstructorError, "cannot register signal interests" 42 43 _log.debug('instantiated document folder for patient [%s]' % self.pk_patient)
44 #--------------------------------------------------------
45 - def cleanup(self):
 46                  pass
 47          #--------------------------------------------------------
 48          # internal helper
 49          #--------------------------------------------------------
50 - def _pkey_exists(self):
51 """Does this primary key exist ? 52 53 - true/false/None 54 """ 55 # patient in demographic database ? 56 rows, idx = gmPG2.run_ro_queries(queries = [ 57 {'cmd': u"select exists(select pk from dem.identity where pk = %s)", 'args': [self.pk_patient]} 58 ]) 59 if not rows[0][0]: 60 _log.error("patient [%s] not in demographic database" % self.pk_patient) 61 return None 62 return True
 63          #--------------------------------------------------------
 64          # API
 65          #--------------------------------------------------------
 66 - def get_latest_freediams_prescription(self):
 67                  cmd = u"""
 68                          SELECT pk_doc
 69                          FROM blobs.v_doc_med
 70                          WHERE
 71                                  pk_patient = %(pat)s
 72                                          AND
 73                                  type = %(typ)s
 74                                          AND
 75                                  ext_ref = %(ref)s
 76                          ORDER BY
 77                                  clin_when DESC
 78                          LIMIT 1
 79                  """
 80                  args = {
 81                          'pat': self.pk_patient,
 82                          'typ': DOCUMENT_TYPE_PRESCRIPTION,
 83                          'ref': u'FreeDiams'
 84                  }
 85                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
 86                  if len(rows) == 0:
 87                          _log.info('no FreeDiams prescription available for patient [%s]' % self.pk_patient)
 88                          return None
 89                  prescription = cDocument(aPK_obj = rows[0][0])
 90                  return prescription
91 #--------------------------------------------------------
92 - def get_latest_mugshot(self):
 93                  cmd = u"SELECT pk_obj FROM blobs.v_latest_mugshot WHERE pk_patient = %s"
 94                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
 95                  if len(rows) == 0:
 96                          _log.info('no mugshots available for patient [%s]' % self.pk_patient)
 97                          return None
 98                  return cDocumentPart(aPK_obj = rows[0][0])
 99
100          latest_mugshot = property(get_latest_mugshot, lambda x:x)
101          #--------------------------------------------------------
102 - def get_mugshot_list(self, latest_only=True):
103                  if latest_only:
104                          cmd = u"select pk_doc, pk_obj from blobs.v_latest_mugshot where pk_patient=%s"
105                  else:
106                          cmd = u"""
107                                  select
108                                          vdm.pk_doc as pk_doc,
109                                          dobj.pk as pk_obj
110                                  from
111                                          blobs.v_doc_med vdm,
112                                          blobs.doc_obj dobj
113                                  where
114                                          vdm.pk_type = (select pk from blobs.doc_type where name = 'patient photograph')
115                                          and vdm.pk_patient = %s
116                                          and dobj.fk_doc = vdm.pk_doc
117                                  """
118                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
119                  return rows
120 #--------------------------------------------------------
121 - def get_doc_list(self, doc_type=None):
122 """return flat list of document IDs""" 123 124 args = { 125 'ID': self.pk_patient, 126 'TYP': doc_type 127 } 128 129 cmd = u""" 130 select vdm.pk_doc 131 from blobs.v_doc_med vdm 132 where 133 vdm.pk_patient = %%(ID)s 134 %s 135 order by vdm.clin_when""" 136 137 if doc_type is None: 138 cmd = cmd % u'' 139 else: 140 try: 141 int(doc_type) 142 cmd = cmd % u'and vdm.pk_type = %(TYP)s' 143 except (TypeError, ValueError): 144 cmd = cmd % u'and vdm.pk_type = (select pk from blobs.doc_type where name = %(TYP)s)' 145 146 rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}]) 147 doc_ids = [] 148 for row in rows: 149 doc_ids.append(row[0]) 150 return doc_ids
151 #--------------------------------------------------------
152 - def get_visual_progress_notes(self, episodes=None, encounter=None):
153                  return self.get_documents (
154                          doc_type = DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE,
155                          episodes = episodes,
156                          encounter = encounter
157                  )
158 #--------------------------------------------------------
159 - def get_unsigned_documents(self):
160                  args = {'pat': self.pk_patient}
161                  cmd = _sql_fetch_document_fields % u"""
162                          pk_doc IN (
163                                  SELECT DISTINCT ON (b_vo.pk_doc) b_vo.pk_doc
164                                  FROM blobs.v_obj4doc_no_data b_vo
165                                  WHERE
166                                          pk_patient = %(pat)s
167                                                  AND
168                                          reviewed IS FALSE
169                          )
170                  ORDER BY clin_when DESC"""
171                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
172                  return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]
173 #--------------------------------------------------------
174 - def get_documents(self, doc_type=None, episodes=None, encounter=None, order_by=None, exclude_unsigned=False):
175 """Return list of documents.""" 176 177 args = { 178 'pat': self.pk_patient, 179 'type': doc_type, 180 'enc': encounter 181 } 182 where_parts = [u'pk_patient = %(pat)s'] 183 184 if doc_type is not None: 185 try: 186 int(doc_type) 187 where_parts.append(u'pk_type = %(type)s') 188 except (TypeError, ValueError): 189 where_parts.append(u'pk_type = (SELECT pk FROM blobs.doc_type WHERE name = %(type)s)') 190 191 if (episodes is not None) and (len(episodes) > 0): 192 where_parts.append(u'pk_episode IN %(epi)s') 193 args['epi'] = tuple(episodes) 194 195 if encounter is not None: 196 where_parts.append(u'pk_encounter = %(enc)s') 197 198 if exclude_unsigned: 199 where_parts.append(u'pk_doc IN (SELECT b_vo.pk_doc FROM blobs.v_obj4doc_no_data b_vo WHERE b_vo.pk_patient = %(pat)s AND b_vo.reviewed IS TRUE)') 200 201 if order_by is None: 202 order_by = u'ORDER BY clin_when' 203 204 cmd = u"%s\n%s" % (_sql_fetch_document_fields % u' AND '.join(where_parts), order_by) 205 rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True) 206 207 return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]
208
209          documents = property(get_documents, lambda x:x)
210          #--------------------------------------------------------
211 - def add_document(self, document_type=None, encounter=None, episode=None):
212                  return create_document(document_type = document_type, encounter = encounter, episode = episode)
213  #============================================================
214  _sql_fetch_document_part_fields = u"select * from blobs.v_obj4doc_no_data where %s"
215
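
A minimal usage sketch for cDocumentFolder (not part of the module source), assuming a connected GNUmed backend and a patient with primary key 12, as in the test code at the end of this module:

        folder = cDocumentFolder(aPKey = 12)            # one document folder per patient
        for doc in folder.get_documents():              # list of cDocument instances
                print doc.format()                      # human-readable summary
        unsigned = folder.get_unsigned_documents()      # documents with unreviewed parts
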
216 -class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
217 """Represents one part of a medical document.""" 218 219 _cmd_fetch_payload = _sql_fetch_document_part_fields % u"pk_obj = %s" 220 _cmds_store_payload = [ 221 u"""UPDATE blobs.doc_obj SET 222 seq_idx = %(seq_idx)s, 223 comment = gm.nullify_empty_string(%(obj_comment)s), 224 filename = gm.nullify_empty_string(%(filename)s), 225 fk_intended_reviewer = %(pk_intended_reviewer)s, 226 fk_doc = %(pk_doc)s 227 WHERE 228 pk = %(pk_obj)s 229 AND 230 xmin = %(xmin_doc_obj)s 231 RETURNING 232 xmin AS xmin_doc_obj""" 233 ] 234 _updatable_fields = [ 235 'seq_idx', 236 'obj_comment', 237 'pk_intended_reviewer', 238 'filename', 239 'pk_doc' 240 ] 241 #-------------------------------------------------------- 242 # retrieve data 243 #--------------------------------------------------------
244 - def export_to_file(self, aChunkSize=0, filename=None, target_mime=None, target_extension=None, ignore_conversion_problems=False):
245
246                  if self._payload[self._idx['size']] == 0:
247                          return None
248
249                  if filename is None:
250                          suffix = None
251                          # preserve original filename extension if available
252                          if self._payload[self._idx['filename']] is not None:
253                                  name, suffix = os.path.splitext(self._payload[self._idx['filename']])
254                                  suffix = suffix.strip()
255                                  if suffix == u'':
256                                          suffix = None
257                          # get unique filename
258                          filename = gmTools.get_unique_filename (
259                                  prefix = 'gm-doc_obj-page_%s-' % self._payload[self._idx['seq_idx']],
260                                  suffix = suffix
261                          )
262
263                  success = gmPG2.bytea2file (
264                          data_query = {
265                                  'cmd': u'SELECT substring(data from %(start)s for %(size)s) FROM blobs.doc_obj WHERE pk=%(pk)s',
266                                  'args': {'pk': self.pk_obj}
267                          },
268                          filename = filename,
269                          chunk_size = aChunkSize,
270                          data_size = self._payload[self._idx['size']]
271                  )
272
273                  if not success:
274                          return None
275
276                  if target_mime is None:
277                          return filename
278
279                  if target_extension is None:
280                          target_extension = gmMimeLib.guess_ext_by_mimetype(mimetype = target_mime)
281
282                  target_fname = gmTools.get_unique_filename (
283                          prefix = 'gm-doc_obj-page_%s-converted-' % self._payload[self._idx['seq_idx']],
284                          suffix = target_extension
285                  )
286                  _log.debug('attempting conversion: [%s] -> [<%s>:%s]', filename, target_mime, target_fname)
287                  if gmMimeLib.convert_file (
288                          filename = filename,
289                          target_mime = target_mime,
290                          target_filename = target_fname
291                  ):
292                          return target_fname
293
294                  _log.warning('conversion failed')
295                  if not ignore_conversion_problems:
296                          return None
297
298                  _log.warning('programmed to ignore conversion problems, hoping receiver can handle [%s]', filename)
299                  return filename
300 #--------------------------------------------------------
301 - def get_reviews(self):
302                  cmd = u"""
303                          select
304                                  reviewer,
305                                  reviewed_when,
306                                  is_technically_abnormal,
307                                  clinically_relevant,
308                                  is_review_by_responsible_reviewer,
309                                  is_your_review,
310                                  coalesce(comment, '')
311                          from blobs.v_reviewed_doc_objects
312                          where pk_doc_obj = %s
313                          order by
314                                  is_your_review desc,
315                                  is_review_by_responsible_reviewer desc,
316                                  reviewed_when desc
317                  """
318                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
319                  return rows
320 #--------------------------------------------------------
321 - def get_containing_document(self):
322                  return cDocument(aPK_obj = self._payload[self._idx['pk_doc']])
323          #--------------------------------------------------------
324          # store data
325          #--------------------------------------------------------
326 - def update_data_from_file(self, fname=None):
327                  # sanity check
328                  if not (os.access(fname, os.R_OK) and os.path.isfile(fname)):
329                          _log.error('[%s] is not a readable file' % fname)
330                          return False
331
332                  gmPG2.file2bytea (
333                          query = u"UPDATE blobs.doc_obj SET data = %(data)s::bytea WHERE pk = %(pk)s",
334                          filename = fname,
335                          args = {'pk': self.pk_obj}
336                  )
337
338                  # must update XMIN now ...
339                  self.refetch_payload()
340                  return True
341 #--------------------------------------------------------
342 - def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
343                  # row already there ?
344                  cmd = u"""
345                          select pk
346                          from blobs.reviewed_doc_objs
347                          where
348                                  fk_reviewed_row = %s and
349                                  fk_reviewer = (select pk from dem.staff where db_user = current_user)"""
350                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
351
352                  # INSERT needed
353                  if len(rows) == 0:
354                          cols = [
355                                  u"fk_reviewer",
356                                  u"fk_reviewed_row",
357                                  u"is_technically_abnormal",
358                                  u"clinically_relevant"
359                          ]
360                          vals = [
361                                  u'%(fk_row)s',
362                                  u'%(abnormal)s',
363                                  u'%(relevant)s'
364                          ]
365                          args = {
366                                  'fk_row': self.pk_obj,
367                                  'abnormal': technically_abnormal,
368                                  'relevant': clinically_relevant
369                          }
370                          cmd = u"""
371                                  insert into blobs.reviewed_doc_objs (
372                                          %s
373                                  ) values (
374                                          (select pk from dem.staff where db_user=current_user),
375                                          %s
376                                  )""" % (', '.join(cols), ', '.join(vals))
377
378                  # UPDATE needed
379                  if len(rows) == 1:
380                          pk_row = rows[0][0]
381                          args = {
382                                  'abnormal': technically_abnormal,
383                                  'relevant': clinically_relevant,
384                                  'pk_row': pk_row
385                          }
386                          cmd = u"""
387                                  update blobs.reviewed_doc_objs set
388                                          is_technically_abnormal = %(abnormal)s,
389                                          clinically_relevant = %(relevant)s
390                                  where
391                                          pk=%(pk_row)s"""
392                  rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
393
394                  return True
395 #--------------------------------------------------------
396 - def set_as_active_photograph(self):
397                  if self._payload[self._idx['type']] != u'patient photograph':
398                          return False
399                  # set seq_idx to current max + 1
400                  rows, idx = gmPG2.run_ro_queries (
401                          queries = [{
402                                  'cmd': u'select coalesce(max(seq_idx)+1, 1) from blobs.doc_obj where fk_doc=%(doc_id)s',
403                                  'args': {'doc_id': self._payload[self._idx['pk_doc']]}
404                          }]
405                  )
406                  self._payload[self._idx['seq_idx']] = rows[0][0]
407                  self._is_modified = True
408                  self.save_payload()
409 #--------------------------------------------------------
410 - def display_via_mime(self, chunksize=0, block=None):
411
412                  fname = self.export_to_file(aChunkSize = chunksize)
413                  if fname is None:
414                          return False, ''
415
416                  success, msg = gmMimeLib.call_viewer_on_file(fname, block = block)
417                  if not success:
418                          return False, msg
419
420                  return True, ''
421 #--------------------------------------------------------
422 - def format(self):
423                  txt = _('%s document part [#%s]\n') % (
424                          gmTools.bool2str (
425                                  boolean = self._payload[self._idx['reviewed']],
426                                  true_str = _('Reviewed'),
427                                  false_str = _('Unreviewed')
428                          ),
429                          self._payload[self._idx['pk_obj']]
430                  )
431
432                  f_ext = u''
433                  if self._payload[self._idx['filename']] is not None:
434                          f_ext = os.path.splitext(self._payload[self._idx['filename']])[1].strip('.').strip()
435                  if f_ext != u'':
436                          f_ext = u'.' + f_ext.upper() + u' '
437                  txt += _(' Part %s: %s %s(%s Bytes)\n') % (
438                          self._payload[self._idx['seq_idx']],
439                          gmTools.size2str(self._payload[self._idx['size']]),
440                          f_ext,
441                          self._payload[self._idx['size']]
442                  )
443
444                  if self._payload[self._idx['filename']] is not None:
445                          txt += _(' Filename: %s\n') % self._payload[self._idx['filename']]
446
447                  if self._payload[self._idx['obj_comment']] is not None:
448                          txt += u'\n%s\n' % self._payload[self._idx['obj_comment']]
449
450                  return txt
451 #------------------------------------------------------------
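
A sketch of exporting and viewing a single part (not part of the module source); the part primary key is a hypothetical placeholder, and export_to_file() returns a temporary filename or None:

        part = cDocumentPart(aPK_obj = 1)                               # hypothetical part PK
        fname = part.export_to_file(target_mime = 'application/pdf')   # export and try PDF conversion
        if fname is None:
                print "export or conversion failed"
        else:
                part.display_via_mime(block = True)                     # hand the exported file to a MIME viewer
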
452 -def delete_document_part(part_pk=None, encounter_pk=None):
453          cmd = u"select blobs.delete_document_part(%(pk)s, %(enc)s)"
454          args = {'pk': part_pk, 'enc': encounter_pk}
455          rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
456          return
457  #============================================================
458  _sql_fetch_document_fields = u"""
459          SELECT
460                  *,
461                  COALESCE (
462                          (SELECT array_agg(seq_idx) FROM blobs.doc_obj b_do WHERE b_do.fk_doc = b_vdm.pk_doc),
463                          ARRAY[]::integer[]
464                  )
465                          AS seq_idx_list
466          FROM
467                  blobs.v_doc_med b_vdm
468          WHERE
469                  %s
470  """
471
472 -class cDocument(gmBusinessDBObject.cBusinessDBObject):
473 """Represents one medical document.""" 474 475 _cmd_fetch_payload = _sql_fetch_document_fields % u"pk_doc = %s" 476 _cmds_store_payload = [ 477 u"""update blobs.doc_med set 478 fk_type = %(pk_type)s, 479 fk_episode = %(pk_episode)s, 480 fk_encounter = %(pk_encounter)s, 481 clin_when = %(clin_when)s, 482 comment = gm.nullify_empty_string(%(comment)s), 483 ext_ref = gm.nullify_empty_string(%(ext_ref)s) 484 where 485 pk = %(pk_doc)s and 486 xmin = %(xmin_doc_med)s""", 487 u"""select xmin_doc_med from blobs.v_doc_med where pk_doc = %(pk_doc)s""" 488 ] 489 490 _updatable_fields = [ 491 'pk_type', 492 'comment', 493 'clin_when', 494 'ext_ref', 495 'pk_episode', 496 'pk_encounter' # mainly useful when moving visual progress notes to their respective encounters 497 ] 498 #--------------------------------------------------------
499 - def refetch_payload(self, ignore_changes=False):
500                  try: del self.__has_unreviewed_parts
501                  except AttributeError: pass
502
503                  return super(cDocument, self).refetch_payload(ignore_changes = ignore_changes)
504 #--------------------------------------------------------
505 - def get_descriptions(self, max_lng=250):
506 """Get document descriptions. 507 508 - will return a list of rows 509 """ 510 if max_lng is None: 511 cmd = u"SELECT pk, text FROM blobs.doc_desc WHERE fk_doc = %s" 512 else: 513 cmd = u"SELECT pk, substring(text from 1 for %s) FROM blobs.doc_desc WHERE fk_doc=%%s" % max_lng 514 rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}]) 515 return rows
516 #--------------------------------------------------------
517 - def add_description(self, description=None):
518                  cmd = u"insert into blobs.doc_desc (fk_doc, text) values (%s, %s)"
519                  gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj, description]}])
520                  return True
521 #--------------------------------------------------------
522 - def update_description(self, pk=None, description=None):
523                  cmd = u"update blobs.doc_desc set text = %(desc)s where fk_doc = %(doc)s and pk = %(pk_desc)s"
524                  gmPG2.run_rw_queries(queries = [
525                          {'cmd': cmd, 'args': {'doc': self.pk_obj, 'pk_desc': pk, 'desc': description}}
526                  ])
527                  return True
528 #--------------------------------------------------------
529 - def delete_description(self, pk=None):
530                  cmd = u"delete from blobs.doc_desc where fk_doc = %(doc)s and pk = %(desc)s"
531                  gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': {'doc': self.pk_obj, 'desc': pk}}])
532                  return True
533 #--------------------------------------------------------
534 - def _get_parts(self):
535                  cmd = _sql_fetch_document_part_fields % u"pk_doc = %s ORDER BY seq_idx"
536                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}], get_col_idx = True)
537                  return [ cDocumentPart(row = {'pk_field': 'pk_obj', 'idx': idx, 'data': r}) for r in rows ]
538
539          parts = property(_get_parts, lambda x:x)
540          #--------------------------------------------------------
541 - def add_part(self, file=None):
542 """Add a part to the document.""" 543 # create dummy part 544 cmd = u""" 545 insert into blobs.doc_obj ( 546 fk_doc, data, seq_idx 547 ) VALUES ( 548 %(doc_id)s, 549 ''::bytea, 550 (select coalesce(max(seq_idx)+1, 1) from blobs.doc_obj where fk_doc=%(doc_id)s) 551 )""" 552 rows, idx = gmPG2.run_rw_queries ( 553 queries = [ 554 {'cmd': cmd, 'args': {'doc_id': self.pk_obj}}, 555 {'cmd': u"select currval('blobs.doc_obj_pk_seq')"} 556 ], 557 return_data = True 558 ) 559 # init document part instance 560 pk_part = rows[0][0] 561 new_part = cDocumentPart(aPK_obj = pk_part) 562 if not new_part.update_data_from_file(fname=file): 563 _log.error('cannot import binary data from [%s] into document part' % file) 564 gmPG2.run_rw_queries ( 565 queries = [ 566 {'cmd': u"delete from blobs.doc_obj where pk = %s", 'args': [pk_part]} 567 ] 568 ) 569 return None 570 new_part['filename'] = file 571 new_part.save_payload() 572 573 return new_part
574 #--------------------------------------------------------
575 - def add_parts_from_files(self, files=None, reviewer=None):
576
577                  new_parts = []
578
579                  for filename in files:
580                          new_part = self.add_part(file = filename)
581                          if new_part is None:
582                                  msg = 'cannot instantiate document part object'
583                                  _log.error(msg)
584                                  return (False, msg, filename)
585                          new_parts.append(new_part)
586
587                          if reviewer is not None:
588                                  new_part['pk_intended_reviewer'] = reviewer             # None == Null
589                                  success, data = new_part.save_payload()
590                                  if not success:
591                                          msg = 'cannot set reviewer to [%s]' % reviewer
592                                          _log.error(msg)
593                                          _log.error(str(data))
594                                          return (False, msg, filename)
595
596                  return (True, '', new_parts)
597 #--------------------------------------------------------
598 - def export_parts_to_files(self, export_dir=None, chunksize=0):
599                  fnames = []
600                  for part in self.parts:
601                          fname = part.export_to_file(aChunkSize = chunksize)
602                          if export_dir is not None:
603                                  shutil.move(fname, export_dir)
604                                  fname = os.path.join(export_dir, os.path.split(fname)[1])
605                          fnames.append(fname)
606                  return fnames
607 #--------------------------------------------------------
608 - def _get_has_unreviewed_parts(self):
609                  try:
610                          return self.__has_unreviewed_parts
611                  except AttributeError:
612                          pass
613
614                  cmd = u"SELECT EXISTS(SELECT 1 FROM blobs.v_obj4doc_no_data WHERE pk_doc = %(pk)s AND reviewed IS FALSE)"
615                  args = {'pk': self.pk_obj}
616                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}])
617                  self.__has_unreviewed_parts = rows[0][0]
618
619                  return self.__has_unreviewed_parts
620
621          has_unreviewed_parts = property(_get_has_unreviewed_parts, lambda x:x)
622          #--------------------------------------------------------
623 - def set_reviewed(self, technically_abnormal=None, clinically_relevant=None):
624                  # FIXME: this is probably inefficient
625                  for part in self.parts:
626                          if not part.set_reviewed(technically_abnormal, clinically_relevant):
627                                  return False
628                  return True
629 #--------------------------------------------------------
630 - def set_primary_reviewer(self, reviewer=None):
631                  for part in self.parts:
632                          part['pk_intended_reviewer'] = reviewer
633                          success, data = part.save_payload()
634                          if not success:
635                                  _log.error('cannot set reviewer to [%s]' % reviewer)
636                                  _log.error(str(data))
637                                  return False
638                  return True
639 #--------------------------------------------------------
640 - def format(self):
641                  part_count = len(self._payload[self._idx['seq_idx_list']])
642                  if part_count == 0:
643                          parts = _('no parts')
644                  elif part_count == 1:
645                          parts = _('1 part')
646                  else:
647                          parts = _('%s parts') % part_count
648                  txt = _(
649                          '%s (%s) #%s\n'
650                          '\n'
651                          ' Created: %s\n'
652                          ' Episode: %s\n'
653                          '%s'
654                          '%s'
655                          '%s'
656                  ) % (
657                          self._payload[self._idx['l10n_type']],
658                          parts,
659                          self._payload[self._idx['pk_doc']],
660                          gmDateTime.pydt_strftime(self._payload[self._idx['clin_when']], format = '%Y %B %d', accuracy = gmDateTime.acc_days),
661                          self._payload[self._idx['episode']],
662                          gmTools.coalesce(self._payload[self._idx['health_issue']], u'', _(' Health issue: %s\n')),
663                          gmTools.coalesce(self._payload[self._idx['ext_ref']], u'', _(' External reference: %s\n')),
664                          gmTools.coalesce(self._payload[self._idx['comment']], u'', u' %s')
665                  )
666                  return txt
667 #------------------------------------------------------------
668 -def create_document(document_type=None, encounter=None, episode=None):
669 """Returns new document instance or raises an exception. 670 """ 671 cmd = u"""INSERT INTO blobs.doc_med (fk_type, fk_encounter, fk_episode) VALUES (%(type)s, %(enc)s, %(epi)s) RETURNING pk""" 672 try: 673 int(document_type) 674 except ValueError: 675 cmd = u""" 676 INSERT INTO blobs.doc_med ( 677 fk_type, 678 fk_encounter, 679 fk_episode 680 ) VALUES ( 681 coalesce ( 682 (SELECT pk from blobs.doc_type bdt where bdt.name = %(type)s), 683 (SELECT pk from blobs.doc_type bdt where _(bdt.name) = %(type)s) 684 ), 685 %(enc)s, 686 %(epi)s 687 ) RETURNING pk""" 688 689 args = {'type': document_type, 'enc': encounter, 'epi': episode} 690 rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True) 691 doc = cDocument(aPK_obj = rows[0][0]) 692 return doc
693 #------------------------------------------------------------
694 -def search_for_documents(patient_id=None, type_id=None, external_reference=None):
695 """Searches for documents with the given patient and type ID.""" 696 if patient_id is None: 697 raise ValueError('need patient id to search for document') 698 699 args = {'pat_id': patient_id, 'type_id': type_id, 'ref': external_reference} 700 where_parts = [u'pk_patient = %(pat_id)s'] 701 702 if type_id is not None: 703 where_parts.append(u'pk_type = %(type_id)s') 704 705 if external_reference is not None: 706 where_parts.append(u'ext_ref = %(ref)s') 707 708 cmd = _sql_fetch_document_fields % u' AND '.join(where_parts) 709 rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True) 710 return [ cDocument(row = {'data': r, 'idx': idx, 'pk_field': 'pk_doc'}) for r in rows ]
711 #------------------------------------------------------------
712 -def delete_document(document_id=None, encounter_id=None):
713          # cascades to doc_obj and doc_desc but not bill.bill
714          cmd = u"SELECT blobs.delete_document(%(pk)s, %(enc)s)"
715          args = {'pk': document_id, 'enc': encounter_id}
716          rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = True)
717          if not rows[0][0]:
718                  _log.error('cannot delete document [%s]', document_id)
719                  return False
720          return True
721 #------------------------------------------------------------
722 -def reclassify_documents_by_type(original_type=None, target_type=None):
723
724          _log.debug('reclassifying documents by type')
725          _log.debug('original: %s', original_type)
726          _log.debug('target: %s', target_type)
727
728          if target_type['pk_doc_type'] == original_type['pk_doc_type']:
729                  return True
730
731          cmd = u"""
732                  update blobs.doc_med set
733                          fk_type = %(new_type)s
734                  where
735                          fk_type = %(old_type)s
736          """
737          args = {u'new_type': target_type['pk_doc_type'], u'old_type': original_type['pk_doc_type']}
738
739          gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
740
741          return True
742
743  #============================================================
744 -class cDocumentType(gmBusinessDBObject.cBusinessDBObject):
745 """Represents a document type.""" 746 _cmd_fetch_payload = u"""select * from blobs.v_doc_type where pk_doc_type=%s""" 747 _cmds_store_payload = [ 748 u"""update blobs.doc_type set 749 name = %(type)s 750 where 751 pk=%(pk_obj)s and 752 xmin=%(xmin_doc_type)s""", 753 u"""select xmin_doc_type from blobs.v_doc_type where pk_doc_type = %(pk_obj)s""" 754 ] 755 _updatable_fields = ['type'] 756 #--------------------------------------------------------
757 - def set_translation(self, translation=None):
758
759                  if translation.strip() == '':
760                          return False
761
762                  if translation.strip() == self._payload[self._idx['l10n_type']].strip():
763                          return True
764
765                  rows, idx = gmPG2.run_rw_queries (
766                          queries = [
767                                  {'cmd': u'select i18n.i18n(%s)', 'args': [self._payload[self._idx['type']]]},
768                                  {'cmd': u'select i18n.upd_tx((select i18n.get_curr_lang()), %(orig)s, %(tx)s)',
769                                   'args': {
770                                          'orig': self._payload[self._idx['type']],
771                                          'tx': translation
772                                   }
773                                  }
774                          ],
775                          return_data = True
776                  )
777                  if not rows[0][0]:
778                          _log.error('cannot set translation to [%s]' % translation)
779                          return False
780
781                  return self.refetch_payload()
782 #------------------------------------------------------------
783 -def get_document_types():
784          rows, idx = gmPG2.run_ro_queries (
785                  queries = [{'cmd': u"SELECT * FROM blobs.v_doc_type"}],
786                  get_col_idx = True
787          )
788          doc_types = []
789          for row in rows:
790                  row_def = {'pk_field': 'pk_doc_type', 'idx': idx, 'data': row}
791                  doc_types.append(cDocumentType(row = row_def))
792          return doc_types
793 #------------------------------------------------------------
794 -def get_document_type_pk(document_type=None):
795          args = {'typ': document_type.strip()}
796
797          cmd = u'SELECT pk FROM blobs.doc_type WHERE name = %(typ)s'
798          rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
799          if len(rows) == 0:
800                  cmd = u'SELECT pk FROM blobs.doc_type WHERE _(name) = %(typ)s'
801                  rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
802
803          if len(rows) == 0:
804                  return None
805
806          return rows[0]['pk']
807 #------------------------------------------------------------
808 -def create_document_type(document_type=None):
809          # check for potential dupes:
810          cmd = u'select pk from blobs.doc_type where name = %s'
811          rows, idx = gmPG2.run_ro_queries (
812                  queries = [{'cmd': cmd, 'args': [document_type]}]
813          )
814          if len(rows) == 0:
815                  cmd1 = u"INSERT INTO blobs.doc_type (name) VALUES (%s) RETURNING pk"
816                  rows, idx = gmPG2.run_rw_queries (
817                          queries = [{'cmd': cmd1, 'args': [document_type]}],
818                          return_data = True
819                  )
820          return cDocumentType(aPK_obj = rows[0][0])
821 #------------------------------------------------------------
822 -def delete_document_type(document_type=None):
823          if document_type['is_in_use']:
824                  return False
825          gmPG2.run_rw_queries (
826                  queries = [{
827                          'cmd': u'delete from blobs.doc_type where pk=%s',
828                          'args': [document_type['pk_doc_type']]
829                  }]
830          )
831          return True
832 #------------------------------------------------------------
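
A sketch of document type handling (not part of the module source), using the type helpers defined above; the type name is a placeholder:

        pk_type = get_document_type_pk(document_type = 'referral report')
        if pk_type is None:
                new_type = create_document_type(document_type = 'referral report')
                pk_type = new_type['pk_doc_type']
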
833 -def get_ext_ref():
834 """This needs *considerably* more smarts.""" 835 dirname = gmTools.get_unique_filename ( 836 prefix = '', 837 suffix = time.strftime(".%Y%m%d-%H%M%S", time.localtime()) 838 ) 839 # extract name for dir 840 path, doc_ID = os.path.split(dirname) 841 return doc_ID
842  #============================================================
843  # main
844  #------------------------------------------------------------
845  if __name__ == '__main__':
846
847          if len(sys.argv) < 2:
848                  sys.exit()
849
850          if sys.argv[1] != u'test':
851                  sys.exit()
852
853          #--------------------------------------------------------
854 - def test_doc_types():
855
856                  print "----------------------"
857                  print "listing document types"
858                  print "----------------------"
859
860                  for dt in get_document_types():
861                          print dt
862
863                  print "------------------------------"
864                  print "testing document type handling"
865                  print "------------------------------"
866
867                  dt = create_document_type(document_type = 'dummy doc type for unit test 1')
868                  print "created:", dt
869
870                  dt['type'] = 'dummy doc type for unit test 2'
871                  dt.save_payload()
872                  print "changed base name:", dt
873
874                  dt.set_translation(translation = 'Dummy-Dokumenten-Typ fuer Unit-Test')
875                  print "translated:", dt
876
877                  print "deleted:", delete_document_type(document_type = dt)
878
879                  return
880 #--------------------------------------------------------
881 - def test_adding_doc_part():
882
883                  print "-----------------------"
884                  print "testing document import"
885                  print "-----------------------"
886
887                  docs = search_for_documents(patient_id=12)
888                  doc = docs[0]
889                  print "adding to doc:", doc
890
891                  fname = sys.argv[1]
892                  print "adding from file:", fname
893                  part = doc.add_part(file=fname)
894                  print "new part:", part
895
896                  return
897 #--------------------------------------------------------
898 - def test_get_documents():
899                  doc_folder = cDocumentFolder(aPKey=12)
900
901                  #photo = doc_folder.get_latest_mugshot()
902                  #print type(photo), photo
903
904                  docs = doc_folder.get_documents()
905                  for doc in docs:
906                          print type(doc), doc
907                          print doc.parts
908                  #pprint(gmBusinessDBObject.jsonclasshintify(docs))
909          #--------------------------------------------------------
910          from Gnumed.pycommon import gmI18N
911          gmI18N.activate_locale()
912          gmI18N.install_domain()
913
914          #test_doc_types()
915          #test_adding_doc_part()
916          test_get_documents()
917
918          # print get_ext_ref()
919
920  #============================================================
921