Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: med-team/gnumed-client
Commits on Source (2)
Showing with 5450 additions and 3534 deletions
......@@ -2,10 +2,63 @@
# client
*****************************************************************
------------------------------------------------
# rel-1-7-patches
------------------------------------------------
1.7.0
NEW: link document to procedure
NEW: link document to hospital stay
NEW: support receiver on documents
NEW: support inactivity of external care entries
NEW: DICOM image preview in PACS plugin
NEW: placeholder <$current_provider_name$>
NEW: placeholder <$current_provider_title$>
NEW: placeholder <$current_provider_firstnames$>
NEW: placeholder <$current_provider_lastnames$>
NEW: placeholder $<diagnoses>$
NEW: switch substance intakes to drug components only
NEW: monitor test results relevant to intakes
NEW: add a database sanity check tool
NEW: add an EMR structure export tool
NEW: more options for putting formatted EMR into export area
NEW: measurements sorted by problem
NEW: verify DICOM data integrity in Orthanc server
NEW: clinical hint about missing LOINCs
IMPROVED: EMR journal layout/retrieval speed
IMPROVED: patient overview usability
IMPROVED: document tree details view
IMPROVED: LaTeX formatting of current medications (port from 1.6 branch)
IMPROVED: early-connect error decoding
IMPROVED: fairly-recent encounter continuation logic
IMPROVED: handling of empty-encounter cleanup
IMPROVED: non-blocking update check
IMPROVED: non-blocking file description retrieval
IMPROVED: patient merging
IMPROVED: email sending framework
IMPROVED: export area workflow
IMPROVED: test results usability
IMPROVED: vaccine/vaccination handling
IMPROVED: test panels now LOINC based
IMPROVED: patient media creation
IMPROVED: use new timeline upstream
IMPROVED: Spanish translation [thanks Uwe]
IMPROVED: long QT syndrome hyperlink updated
IMPROVED: age/DOB tooltip in top panel
IMPROVED: measurements: access related docs from list-by-day
IMPROVED: patient studies download from PACS
IMPROVED: provider inbox layout
------------------------------------------------
# rel-1-6-patches
------------------------------------------------
1.6.16
IMPROVED: visual progress note editing workflow
1.6.15
FIX: exception on tooltipping patient overview inbox item
......@@ -1876,10 +1929,35 @@ FIX: missing cast to ::text in dem.date_trunc_utc() calls
# database
*****************************************************************
------------------------------------------------
# gnumed_v22
------------------------------------------------
NEW: revalidate constraints during database upgrade
NEW: deprecate gm-backup_* in favor of gm-backup
NEW: deprecate gm-restore_* in favor of gm-restore
IMPROVED: staging._journal_without_suppressed_hints -> clin._v_emr_journal_without_suppressed_hints
IMPROVED: safer backup scripts
IMPROVED: don't fail clin.remove_old_empty_encounters() but return FALSE on <2 encounters
IMPROVED: substance abuse entries can have arbitrary .discontinued
IMPROVED: rework vaccine/vaccination tables/views
IMPROVED: turn unique identity assertion into deferred constraint trigger
IMPROVED: allow empty and comment lines in schema change file list definitions
IMPROVED: bootstrapper error logging
IMPROVED: revive pg_upgrade helper
FIX: constrain clin.clin_root_item.soap_cat CHECK to lower case
------------------------------------------------
# gnumed_v21
------------------------------------------------
21.16
FIX: typo in fingerprint script man page [thanks Debian]
FIX: failure to recreate functions changing return type on upgrade
21.15
FIX: handle SQL_INHERITANCE in a way compatible with PG10
......
The Timeline code under .../client/timelinelib/ and
.../client/icons/ was downloaded from:
The Timeline code under
.../client/timelinelib/
.../client/tlicons/
was downloaded from:
http://hg.code.sf.net/p/thetimelineproj/stable
......
......@@ -33,7 +33,7 @@ ATC_NICOTINE = u'N07BA01'
ATC_ETHANOL = u'V03AB16'
#============================================================
def propagate_atc(substance=None, atc=None):
def propagate_atc(substance=None, atc=None, link_obj=None):
_log.debug('substance <%s>, ATC <%s>', substance, atc)
......@@ -42,7 +42,7 @@ def propagate_atc(substance=None, atc=None):
atc = None
if atc is None:
atcs = text2atc(text = substance, fuzzy = False)
atcs = text2atc(text = substance, fuzzy = False, link_obj = link_obj)
if len(atcs) == 0:
_log.debug(u'no ATC found, aborting')
return atc
......@@ -53,17 +53,17 @@ def propagate_atc(substance=None, atc=None):
args = {'atc': atc, 'term': substance.strip()}
queries = [
{'cmd': u"UPDATE ref.consumable_substance SET atc_code = %(atc)s WHERE lower(description) = lower(%(term)s) AND atc_code IS NULL",
{'cmd': u"UPDATE ref.substance SET atc = %(atc)s WHERE lower(description) = lower(%(term)s) AND atc IS NULL",
'args': args},
{'cmd': u"UPDATE ref.branded_drug SET atc_code = %(atc)s WHERE lower(description) = lower(%(term)s) AND atc_code IS NULL",
{'cmd': u"UPDATE ref.drug_product SET atc_code = %(atc)s WHERE lower(description) = lower(%(term)s) AND atc_code IS NULL",
'args': args}
]
gmPG2.run_rw_queries(queries = queries)
gmPG2.run_rw_queries(link_obj = link_obj, queries = queries)
return atc
#============================================================
def text2atc(text=None, fuzzy=False):
def text2atc(text=None, fuzzy=False, link_obj=None):
text = text.strip()
......@@ -76,12 +76,12 @@ def text2atc(text=None, fuzzy=False):
FROM ref.v_atc
WHERE term ilike %(term)s AND atc IS NOT NULL
UNION
SELECT atc_code, null, null
FROM ref.consumable_substance
WHERE description ilike %(term)s AND atc_code IS NOT NULL
SELECT atc as atc_code, null, null
FROM ref.substance
WHERE description ilike %(term)s AND atc IS NOT NULL
UNION
SELECT atc_code, null, null
FROM ref.branded_drug
FROM ref.drug_product
WHERE description ilike %(term)s AND atc_code IS NOT NULL
) as tmp
ORDER BY atc_code
......@@ -95,23 +95,30 @@ def text2atc(text=None, fuzzy=False):
FROM ref.v_atc
WHERE lower(term) = lower(%(term)s) AND atc IS NOT NULL
UNION
SELECT atc_code, null, null
FROM ref.consumable_substance
WHERE lower(description) = lower(%(term)s) AND atc_code IS NOT NULL
SELECT atc as atc_code, null, null
FROM ref.substance
WHERE lower(description) = lower(%(term)s) AND atc IS NOT NULL
UNION
SELECT atc_code, null, null
FROM ref.branded_drug
FROM ref.drug_product
WHERE lower(description) = lower(%(term)s) AND atc_code IS NOT NULL
) as tmp
ORDER BY atc_code
"""
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
rows, idx = gmPG2.run_ro_queries(link_obj = link_obj, queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
_log.debug(u'term: %s => ATCs: %s (fuzzy: %s)', text, rows, fuzzy)
return rows
#============================================================
def exists_as_atc(substance):
args = {'term': substance}
cmd = u'SELECT EXISTS (SELECT 1 FROM ref.atc WHERE lower(term) = lower(%(term)s))'
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
return rows[0][0]
#============================================================
def get_reference_atcs(order_by=u'atc, term, lang'):
cmd = u'SELECT * FROM ref.v_atc ORDER BY %s' % order_by
......
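The new link_obj parameter added above lets propagate_atc() and text2atc() run their lookup and UPDATE statements on a caller-supplied connection instead of opening their own. A minimal sketch of threading one connection through the call follows; the module name gmATC and the gmPG2.get_connection() helper are assumptions for illustration and not part of the diff.

# sketch only: reuse one read-write connection for both the ATC lookup
# and the propagation UPDATEs, via the new link_obj parameter
import sys
sys.path.insert(0, '../../')            # as in the test sections of the modules in this changeset
from Gnumed.pycommon import gmPG2
from Gnumed.business import gmATC       # module name assumed from the functions shown above

# assumption: gmPG2 exposes a connection helper; substitute whatever your
# calling code already uses to obtain its read-write connection
conn = gmPG2.get_connection(readonly = False)

atc = gmATC.propagate_atc(substance = u'metformin', atc = None, link_obj = conn)
print('ATC propagated:', atc)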
......@@ -40,7 +40,9 @@ class cDynamicHint(gmBusinessDBObject.cBusinessDBObject):
hint = gm.nullify_empty_string(%(hint)s),
url = gm.nullify_empty_string(%(url)s),
source = gm.nullify_empty_string(%(source)s),
is_active = %(is_active)s
is_active = %(is_active)s,
popup_type = %(popup_type)s,
highlight_as_priority = %(highlight_as_priority)s
WHERE
pk = %(pk_auto_hint)s
AND
......@@ -56,7 +58,9 @@ class cDynamicHint(gmBusinessDBObject.cBusinessDBObject):
u'hint',
u'url',
u'source',
u'is_active'
u'is_active',
u'popup_type',
u'highlight_as_priority'
]
#--------------------------------------------------------
def format_maximum_information(self, patient):
......
......@@ -366,7 +366,7 @@ class cBill(gmBusinessDBObject.cBusinessDBObject):
u'pk_doc'
]
#--------------------------------------------------------
def format(self):
def format(self, include_receiver=True, include_doc=True):
txt = u'%s [#%s]\n' % (
gmTools.bool2subst (
(self._payload[self._idx['close_date']] is None),
......@@ -412,21 +412,23 @@ class cBill(gmBusinessDBObject.cBusinessDBObject):
txt += _(' Items billed: 0\n')
else:
txt += _(' Items billed: %s\n') % len(self._payload[self._idx['pk_bill_items']])
txt += _(' Invoice: %s\n') % (
gmTools.bool2subst (
self._payload[self._idx['pk_doc']] is None,
_('not available'),
u'#%s' % self._payload[self._idx['pk_doc']]
if include_doc:
txt += _(' Invoice: %s\n') % (
gmTools.bool2subst (
self._payload[self._idx['pk_doc']] is None,
_('not available'),
u'#%s' % self._payload[self._idx['pk_doc']]
)
)
)
txt += _(' Patient: #%s\n') % self._payload[self._idx['pk_patient']]
txt += gmTools.coalesce (
self._payload[self._idx['pk_receiver_identity']],
u'',
_(' Receiver: #%s\n')
)
if self._payload[self._idx['pk_receiver_address']] is not None:
txt += u'\n '.join(gmDemographicRecord.get_patient_address(pk_patient_address = self._payload[self._idx['pk_receiver_address']]).format())
if include_receiver:
txt += gmTools.coalesce (
self._payload[self._idx['pk_receiver_identity']],
u'',
_(' Receiver: #%s\n')
)
if self._payload[self._idx['pk_receiver_address']] is not None:
txt += u'\n '.join(gmDemographicRecord.get_patient_address(pk_patient_address = self._payload[self._idx['pk_receiver_address']]).format())
return txt
#--------------------------------------------------------
......@@ -506,6 +508,13 @@ def get_bills(order_by=None, pk_patient=None):
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
return [ cBill(row = {'data': r, 'idx': idx, 'pk_field': 'pk_bill'}) for r in rows ]
#------------------------------------------------------------
def get_bills4document(pk_document=None):
args = {'pk_doc': pk_document}
cmd = _SQL_get_bill_fields % u'pk_doc = %(pk_doc)s'
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
return [ cBill(row = {'data': r, 'idx': idx, 'pk_field': 'pk_bill'}) for r in rows ]
#------------------------------------------------------------
def create_bill(conn=None, invoice_id=None):
......
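A short usage sketch for the two billing additions above, the get_bills4document() helper and the new include_receiver/include_doc switches on cBill.format(). The document primary key is a placeholder; the module name gmBilling is taken from the import shown further down in this changeset.

# sketch only: list the bills attached to one invoice document
import sys
sys.path.insert(0, '../../')
from Gnumed.business import gmBilling

for bill in gmBilling.get_bills4document(pk_document = 123):    # placeholder pk
    # suppress the invoice line (the document is already known)
    # but keep the receiver details
    print(bill.format(include_doc = False, include_receiver = True))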
"""GNUmed chart pulling related middleware."""
#============================================================
__license__ = "GPL v2 or later"
__author__ = "K.Hilbert <Karsten.Hilbert@gmx.net>"
import sys
import logging
if __name__ == '__main__':
sys.path.insert(0, '../../')
from Gnumed.pycommon import gmTools
from Gnumed.pycommon import gmCfg
from Gnumed.pycommon import gmPG2
from Gnumed.business import gmStaff
from Gnumed.business import gmProviderInbox
from Gnumed.business import gmEMRStructItems
from Gnumed.business import gmPraxis
_log = logging.getLogger('gm.praxis')
#============================================================
def _check_for_provider_chart_access(person):
curr_prov = gmStaff.gmCurrentProvider()
# can view my own chart
if person.ID == curr_prov['pk_identity']:
return True
# primary provider can view patient
if person['pk_primary_provider'] == curr_prov['pk_staff']:
return True
# is the patient a provider ?
if person.ID not in [ s['pk_identity'] for s in gmStaff.get_staff_list() ]:
return True
prov = u'%s (%s%s %s)' % (
curr_prov['short_alias'],
gmTools.coalesce(curr_prov['title'], u'', u'%s '),
curr_prov['firstnames'],
curr_prov['lastnames']
)
pat = u'%s%s %s' % (
gmTools.coalesce(person['title'], u'', u'%s '),
person['firstnames'],
person['lastnames']
)
# notify the staff member
gmProviderInbox.create_inbox_message (
staff = person.staff_id,
message_type = _('Privacy notice'),
message_category = u'administrative',
subject = _('%s: Your chart has been accessed by %s (without user interaction, probably by a script).') % (pat, prov),
patient = person.ID
)
# notify /me about the staff member notification
gmProviderInbox.create_inbox_message (
staff = curr_prov['pk_staff'],
message_type = _('Privacy notice'),
message_category = u'administrative',
subject = _('%s: Staff member %s has been notified of your chart access.') % (prov, pat)
)
return True
#----------------------------------------------------------------
def _ensure_person_is_patient(person):
if person.is_patient:
return True
person.is_patient = True
return True
#----------------------------------------------------------------
def _get_very_recent_encounter(pk_identity):
cfg_db = gmCfg.cCfgSQL()
min_ttl = cfg_db.get2 (
option = u'encounter.minimum_ttl',
workplace = gmPraxis.gmCurrentPraxisBranch().active_workplace,
bias = u'user',
default = u'1 hour 30 minutes'
)
cmd = u"""
SELECT pk_encounter
FROM clin.v_most_recent_encounters
WHERE
pk_patient = %s
and
last_affirmed > (now() - %s::interval)
ORDER BY
last_affirmed DESC"""
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [pk_identity, min_ttl]}])
if len(rows) == 0:
_log.debug('no <very recent> encounter (younger than [%s]) found' % min_ttl)
return None
_log.debug('"very recent" encounter [%s] found and re-activated', rows[0][0])
return gmEMRStructItems.cEncounter(aPK_obj = rows[0][0])
#----------------------------------------------------------------
def _decide_on_active_encounter(pk_identity):
enc = _get_very_recent_encounter(pk_identity)
if enc is not None:
return enc
_here = gmPraxis.gmCurrentPraxisBranch()
cfg_db = gmCfg.cCfgSQL()
enc_type = cfg_db.get2 (
option = u'encounter.default_type',
workplace = _here.active_workplace,
bias = u'user'
)
if enc_type is None:
enc_type = gmEMRStructItems.get_most_commonly_used_encounter_type()
if enc_type is None:
enc_type = u'in surgery'
enc = gmEMRStructItems.create_encounter(fk_patient = pk_identity, enc_type = enc_type)
enc['pk_org_unit'] = _here['pk_org_unit']
enc.save()
_log.debug('new encounter [%s] initiated' % enc['pk_encounter'])
return enc
#------------------------------------------------------------
def tui_chart_puller(person):
_log.debug('pulling chart for identity [%s]', person.ID)
# be careful about pulling charts of our own staff
if not _check_for_provider_chart_access(person):
return None
person.is_patient = True
enc = _decide_on_active_encounter(person.ID)
person.as_patient.ensure_has_allergy_state(enc['pk_encounter'])
# set encounter in EMR
from Gnumed.business import gmClinicalRecord
emr = gmClinicalRecord.cClinicalRecord(aPKey = person.ID, allow_user_interaction = False, encounter = enc)
emr.log_access(action = u'chart pulled for patient [%s] (no user interaction)' % person.ID)
return emr
#============================================================
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit()
if sys.argv[1] != 'test':
sys.exit()
# from Gnumed.pycommon import gmI18N
# gmI18N.install_domain()
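The module above pulls a patient chart without user interaction: it notifies staff members whose own chart is being accessed, marks the person as a patient, re-activates a sufficiently recent encounter or starts a new one, and returns a clinical record. A hedged calling sketch follows; the module name gmChartPulling is assumed, while cPatient and its constructor are taken from the export area code later in this changeset.

# sketch only: pull a chart non-interactively for one identity
import sys
sys.path.insert(0, '../../')
from Gnumed.business.gmPerson import cPatient
from Gnumed.business import gmChartPulling      # module name assumed from the file above

person = cPatient(aPK_obj = 12)                 # placeholder identity pk
emr = gmChartPulling.tui_chart_puller(person)
if emr is None:
    print('chart access denied or failed')
else:
    print('chart pulled:', emr)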
......@@ -383,7 +383,7 @@ def get_as_journal(since=None, until=None, encounters=None, episodes=None, issue
raise ValueError('at least one of <patient>, <episodes>, <issues>, <encounters> must not be None')
if order_by is None:
order_by = u'ORDER BY c_vej.clin_when, c_vej.pk_episode, scr, c_vej.modified_when, c_vej.src_table'
order_by = u'ORDER BY clin_when, pk_episode, scr, modified_when, src_table'
else:
order_by = u'ORDER BY %s' % order_by
......@@ -417,8 +417,7 @@ def get_as_journal(since=None, until=None, encounters=None, episodes=None, issue
# FIXME: implement more constraints
# get rows from clin.v_emr_journal
cmd = u"""
cmd_journal = u"""
SELECT
to_char(c_vej.clin_when, 'YYYY-MM-DD') AS date,
c_vej.clin_when,
......@@ -449,43 +448,50 @@ def get_as_journal(since=None, until=None, encounters=None, episodes=None, issue
join clin.soap_cat_ranks c_scr on (c_scr.soap_cat IS NOT DISTINCT FROM c_vej.soap_cat)
WHERE
%s
%s""" % (
u'\n\t\t\t\t\tAND\n\t\t\t\t'.join(where_parts),
order_by
)
journal_rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
""" % u'\n\t\t\t\t\tAND\n\t\t\t\t'.join(where_parts)
if active_encounter is not None:
# get rows from clin.get_hints_for_patient()
pk_identity = journal_rows[0]['pk_patient']
hints = gmAutoHints.get_hints_for_patient(pk_identity = pk_identity)
for hint in hints:
d = {}
d['date'] = gmDateTime.pydt_strftime(active_encounter['started'], '%Y-%m-%d')
d['clin_when'] = active_encounter['started']
d['soap_cat'] = u'a'
d['narrative'] = hint.format()
d['src_table'] = u'ref.auto_hint'
d['rank'] = 3 # FIXME: should be rank_of['a']
d['modified_when'] = active_encounter['started'] # FIXME: should be hint['modified_when']
d['date_modified'] = gmDateTime.pydt_strftime(active_encounter['started'], '%Y-%m-%d %H:%M') # FIXME: should use hint['modified_when']
d['modified_by'] = active_encounter['modified_by'] # FIXME: should be hint['modified_by']
d['row_version'] = 0 # FIXME: should be hint['row_version']
d['pk_episode'] = None
d['pk_encounter'] = active_encounter['pk_encounter']
d['real_soap_cat'] = u'a'
d['src_pk'] = hint['pk_auto_hint']
d['pk_health_issue'] = None
d['health_issue'] = u''
d['episode'] = u''
d['issue_active'] = False
d['issue_clinically_relevant'] = False
d['episode_open'] = False
d['encounter_started'] = active_encounter['started']
d['encounter_last_affirmed'] = active_encounter['last_affirmed']
d['encounter_l10n_type'] = active_encounter['l10n_type']
d['pk_patient'] = pk_identity
journal_rows.append(d)
if active_encounter is None:
cmd = cmd_journal + u'\n ' + order_by
else:
args['pk_enc'] = active_encounter['pk_encounter']
args['enc_start'] = active_encounter['started']
args['enc_last_affirmed'] = active_encounter['last_affirmed']
args['enc_type'] = active_encounter['l10n_type']
args['enc_pat'] = active_encounter['pk_patient']
cmd_hints = u"""
SELECT
to_char(now(), 'YYYY-MM-DD') AS date,
now() as clin_when,
'a'::text as soap_cat,
hints.title || E'\n' || hints.hint
as narrative,
'ref.auto_hint'::text as src_table,
c_scr.rank AS scr,
now() as modified_when,
to_char(now(), 'YYYY-MM-DD HH24:MI') AS date_modified,
current_user as modified_by,
0::integer as row_version,
NULL::integer as pk_episode,
%(pk_enc)s as pk_encounter,
'a'::text as real_soap_cat,
hints.pk_auto_hint as src_pk,
NULL::integer as pk_health_issue,
''::text as health_issue,
''::text as episode,
False as issue_active,
False as issue_clinically_relevant,
False as episode_open,
%(enc_start)s as encounter_started,
%(enc_last_affirmed)s as encounter_last_affirmed,
%(enc_type)s as encounter_l10n_type,
%(enc_pat)s as pk_patient
FROM
clin.get_hints_for_patient(%(enc_pat)s) as hints
join clin.soap_cat_ranks c_scr on (c_scr.soap_cat = 'a')
"""
cmd = cmd_journal + u'\nUNION ALL\n' + cmd_hints + u'\n' + order_by
journal_rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
return journal_rows
......
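The rewrite above replaces the Python-side construction of auto-hint rows with a second SELECT over clin.get_hints_for_patient() that is UNION ALLed onto the journal query, so ordering happens once in SQL. A hedged calling sketch; the module name gmClinNarrative is an assumption, the active_encounter keyword is inferred from its use in the function body above, and all primary keys are placeholders.

# sketch only: fetch the journal for some encounters and have the current
# auto-hints appended via the UNION ALL branch
import sys
sys.path.insert(0, '../../')
from Gnumed.business import gmClinNarrative     # module name assumed
from Gnumed.business import gmEMRStructItems

enc = gmEMRStructItems.cEncounter(aPK_obj = 1)  # placeholder encounter pk
rows = gmClinNarrative.get_as_journal (
    encounters = [1],                           # placeholder
    active_encounter = enc                      # triggers the clin.get_hints_for_patient() branch
)
for row in rows:
    print(row['date'], row['soap_cat'], row['narrative'])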
......@@ -47,6 +47,7 @@ class cClinicalResult(object):
self.sub_results = []
self.warnings = [_('THIS IS NOT A VERIFIED MEASUREMENT. DO NOT USE FOR ACTUAL CARE.')]
self.hints = []
#--------------------------------------------------------
def __unicode__(self):
txt = u'[cClinicalResult]: %s %s (%s)\n\n%s' % (
......@@ -65,6 +66,7 @@ class cClinicalResult(object):
)
)
return txt
#--------------------------------------------------------
def format(self, left_margin=0, eol=u'\n', width=None, with_formula=False, with_warnings=True, with_variables=False, with_sub_results=False, with_hints=True, return_list=False):
lines = []
......@@ -147,6 +149,7 @@ class cClinicalCalculator(object):
def __init__(self, patient=None):
self.__cache = {}
self.__patient = patient
#--------------------------------------------------------
def _get_patient(self):
return self.__patient
......@@ -158,9 +161,7 @@ class cClinicalCalculator(object):
self.remove_from_cache() # uncache all values
patient = property(lambda x:x, _set_patient)
#--------------------------------------------------------
# def suggest_algorithm(self, pk_test_type):
# return None
#--------------------------------------------------------
def remove_from_cache(self, key=None):
if key is None:
......@@ -172,6 +173,7 @@ class cClinicalCalculator(object):
except KeyError:
_log.error('key [%s] does not exist in cache', key)
return False
#--------------------------------------------------------
# formulae
#--------------------------------------------------------
......@@ -230,6 +232,7 @@ class cClinicalCalculator(object):
_log.debug(u'%s' % result)
return result
#--------------------------------------------------------
def _get_egfrs(self):
egfrs = [
......@@ -253,7 +256,7 @@ class cClinicalCalculator(object):
# this logic is based on "KVH aktuell 2/2014 Seite 10-15"
# expect normal GFR
CKD = self.eGFR_CKD_EPI
if CKD.numeric_value > 60:
if CKD.numeric_value > self.d(60):
return CKD
# CKD at or below 60
......@@ -282,7 +285,7 @@ class cClinicalCalculator(object):
age = gmDateTime.calculate_apparent_age(start = self.__patient['dob'])[0]
# geriatric ?
if age > 65:
if age > self.d(65):
if CG.numeric_value is not None:
return CG
......@@ -292,7 +295,7 @@ class cClinicalCalculator(object):
return CKD
return CG
if MDRD.numeric_value > 60:
if MDRD.numeric_value > self.d(60):
if CKD.numeric_value is not None:
# probably normal after all (>60) -> use CKD-EPI
return CKD
......@@ -780,6 +783,7 @@ class cClinicalCalculator(object):
return result
body_surface_area = property(_get_body_surface_area, lambda x:x)
#--------------------------------------------------------
def _get_body_mass_index(self):
......@@ -862,6 +866,7 @@ class cClinicalCalculator(object):
body_mass_index = property(_get_body_mass_index, lambda x:x)
bmi = property(_get_body_mass_index, lambda x:x)
#--------------------------------------------------------
# helper functions
#--------------------------------------------------------
......
......@@ -21,6 +21,8 @@ import httplib2
import json
import zipfile
import shutil
import time
import datetime as pydt
from urllib import urlencode
import distutils.version as version
......@@ -59,7 +61,11 @@ class cOrthancServer:
def connect(self, host, port, user, password, expected_minimal_version=None, expected_name=None, expected_aet=None):
if (host is None) or (host.strip() == u''):
host = u'localhost'
self.__server_url = str('http://%s:%s' % (host, port))
try:
self.__server_url = str('http://%s:%s' % (host, port))
except Exception:
_log.exception(u'cannot create server url from: host [%s] and port [%s]', host, port)
return False
self.__user = user
self.__password = password
_log.info('connecting as [%s] to Orthanc server at [%s]', self.__user, self.__server_url)
......@@ -99,6 +105,30 @@ class cOrthancServer:
return False
_log.debug('server: %s', system_data)
self.__server_identification = system_data
# check time skew
tolerance = 60 # seconds
client_now_as_utc = pydt.datetime.utcnow()
start = time.time()
orthanc_now_str = self.__run_GET(url = '%s/tools/now' % self.__server_url) # 20180208T165832
end = time.time()
query_duration = end - start
orthanc_now_unknown_tz = pydt.datetime.strptime(orthanc_now_str, '%Y%m%dT%H%M%S')
_log.info('GNUmed "now" (UTC): %s', client_now_as_utc)
_log.info('Orthanc "now" (UTC): %s', orthanc_now_unknown_tz)
_log.debug('wire roundtrip (seconds): %s', query_duration)
_log.debug('maximum skew tolerance (seconds): %s', tolerance)
if query_duration > tolerance:
_log.error('useless to check GNUmed/Orthanc time skew, wire roundtrip > tolerance (%s)', tolerance)
else:
if orthanc_now_unknown_tz > client_now_as_utc:
real_skew = orthanc_now_unknown_tz - client_now_as_utc
else:
real_skew = client_now_as_utc - orthanc_now_unknown_tz
_log.debug('GNUmed/Orthanc time skew: %s', real_skew)
if real_skew > pydt.timedelta(seconds = tolerance):
_log.error('GNUmed/Orthanc time skew > tolerance (may be due to timezone differences)')
return self.__server_identification
server_identification = property(_get_server_identification, lambda x:x)
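The block above checks for clock skew by comparing the client's UTC "now" against Orthanc's /tools/now endpoint and discounting the wire roundtrip. A small standalone sketch of the same computation, using httplib2 as the module does; it assumes an unauthenticated Orthanc at localhost:8042 and reuses the 60 second tolerance from the code.

# standalone sketch of the time skew check (assumptions: no authentication,
# Orthanc at localhost:8042, /tools/now returning e.g. 20180208T165832)
import time
import datetime as pydt
import httplib2

tolerance = 60                                  # seconds, as in the code above
client_now_as_utc = pydt.datetime.utcnow()
start = time.time()
response, content = httplib2.Http().request('http://localhost:8042/tools/now', 'GET')
query_duration = time.time() - start
orthanc_now = pydt.datetime.strptime(content.strip(), '%Y%m%dT%H%M%S')
skew = abs(orthanc_now - client_now_as_utc)
if query_duration > tolerance:
    print('wire roundtrip too long to judge skew')
elif skew > pydt.timedelta(seconds = tolerance):
    print('clock skew beyond tolerance (or timezone difference):', skew)
else:
    print('clock skew acceptable:', skew)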
......@@ -428,20 +458,34 @@ class cOrthancServer:
_log.info(u'exporting %s studies into [%s]', len(study_ids), filename)
_log.debug(u'studies: %s', study_ids)
f = io.open(filename, 'wb')
url = '%s/tools/create-media' % self.__server_url
_log.debug(url)
# You have to make a POST request against URI "/tools/create-media", with a
# JSON body that contains the array of the resources of interest (as Orthanc
# identifiers). Here is a sample command-line:
# curl -X POST http://localhost:8042/tools/create-media -d '["8c4663df-c3e66066-9e20a8fc-dd14d1e5-251d3d84","2cd4848d-02f0005f-812ffef6-a210bbcf-3f01a00a","6eeded74-75005003-c3ae9738-d4a06a4f-6beedeb8","8a622020-c058291c-7693b63f-bc67aa2e-0a02e69c"]' -v > /tmp/a.zip
# (this will not create duplicates but will also not check for single-patient-ness)
url = '%s/tools/create-media-extended' % self.__server_url
_log.debug(url)
try:
f.write(self.__run_POST(url = url, data = study_ids))
downloaded = self.__run_POST(url = url, data = study_ids, output_file = f)
if not downloaded:
_log.error('this Orthanc version probably does not support "create-media-extended"')
except TypeError:
f.close()
_log.exception('cannot retrieve multiple studies as one archive with DICOMDIR, probably not supported by this Orthanc version')
return False
f.close()
# retry with old URL
if not downloaded:
url = '%s/tools/create-media' % self.__server_url
_log.debug(u'retrying: %s', url)
try:
downloaded = self.__run_POST(url = url, data = study_ids, output_file = f)
if not downloaded:
return False
except TypeError:
_log.exception('cannot retrieve multiple studies as one archive with DICOMDIR, probably not supported by this Orthanc version')
return False
finally:
f.close()
if create_zip:
return filename
if target_dir is None:
......@@ -451,6 +495,39 @@ class cOrthancServer:
return False
return target_dir
#--------------------------------------------------------
def get_instance_dicom_tags(self, instance_id, simplified=True):
_log.debug('retrieving DICOM tags for instance [%s]', instance_id)
if simplified:
download_url = '%s/instances/%s/simplified-tags' % (self.__server_url, instance_id)
else:
download_url = '%s/instances/%s/tags' % (self.__server_url, instance_id)
return self.__run_GET(url = download_url)
#--------------------------------------------------------
def get_instance_preview(self, instance_id, filename=None):
if filename is None:
filename = gmTools.get_unique_filename(suffix = '.png')
_log.debug('exporting preview for instance [%s] into [%s]', instance_id, filename)
download_url = '%s/instances/%s/preview' % (self.__server_url, instance_id)
f = io.open(filename, 'wb')
f.write(self.__run_GET(url = download_url))
f.close()
return filename
#--------------------------------------------------------
def get_instance(self, instance_id, filename=None):
if filename is None:
filename = gmTools.get_unique_filename(suffix = '.dcm')
_log.debug('exporting instance [%s] into [%s]', instance_id, filename)
download_url = '%s/instances/%s/attachments/dicom/data' % (self.__server_url, instance_id)
f = io.open(filename, 'wb')
f.write(self.__run_GET(url = download_url))
f.close()
return filename
#--------------------------------------------------------
# server-side API
#--------------------------------------------------------
......@@ -483,6 +560,28 @@ class cOrthancServer:
url = '%s/patients/%s/protected' % (self.__server_url, str(orthanc_id))
return (self.__run_GET(url) == 1)
#--------------------------------------------------------
def verify_patient_data(self, orthanc_id):
_log.info('verifying DICOM data of patient [%s]', orthanc_id)
bad_data = []
instances_url = '%s/patients/%s/instances' % (self.__server_url, orthanc_id)
instances = self.__run_GET(instances_url)
for instance in instances:
instance_id = instance['ID']
attachments_url = '%s/instances/%s/attachments' % (self.__server_url, instance_id)
attachments = self.__run_GET(attachments_url)
for attachment in attachments:
verify_url = '%s/%s/verify-md5' % (attachments_url, attachment)
# False, success = "{}"
#2018-02-08 19:11:27 ERROR gm.dicom [-1211701504 MainThread] (gmDICOM.py::__run_POST() #986): cannot POST: http://localhost:8042/instances/5a8206f4-24619e76-6650d9cd-792cdf25-039e96e6/attachments/dicom-as-json/verify-md5
#2018-02-08 19:11:27 ERROR gm.dicom [-1211701504 MainThread] (gmDICOM.py::__run_POST() #987): response: {'status': '400', 'content-length': '0'}
if self.__run_POST(verify_url) is not False:
continue
_log.error(u'bad MD5 of DICOM file at url [%s]: patient=%s, attachment_type=%s', verify_url, orthanc_id, attachment)
bad_data.append({'patient': orthanc_id, 'instance': instance_id, 'type': attachment, 'orthanc': u'%s [%s]' % (self.server_identification, self.__server_url)})
return bad_data
#--------------------------------------------------------
def modify_patient_id(self, old_patient_id, new_patient_id):
......@@ -668,11 +767,7 @@ class cOrthancServer:
except KeyError:
pass
for key in pat_dict:
if pat_dict[key] == u'unknown':
pat_dict[key] = None
if pat_dict[key] == u'(null)':
pat_dict[key] = None
if pat_dict[key] == u'':
if pat_dict[key] in [u'unknown', u'(null)', u'']:
pat_dict[key] = None
pat_dict[key] = cleanup_dicom_string(pat_dict[key])
studies_by_patient.append(pat_dict)
......@@ -718,11 +813,7 @@ class cOrthancServer:
except KeyError:
pass
for key in study_dict:
if study_dict[key] == u'unknown':
study_dict[key] = None
if study_dict[key] == u'(null)':
study_dict[key] = None
if study_dict[key] == u'':
if study_dict[key] in [u'unknown', u'(null)', u'']:
study_dict[key] = None
study_dict[key] = cleanup_dicom_string(study_dict[key])
study_dict['all_tags'] = {}
......@@ -749,18 +840,22 @@ class cOrthancServer:
# loop over series in study
for orth_series_id in orth_study['Series']:
orth_series = self.__run_GET(url = u'%s/series/%s' % (self.__server_url, orth_series_id))
#slices = orth_series['Instances']
ordered_slices = self.__run_GET(url = u'%s/series/%s/ordered-slices' % (self.__server_url, orth_series_id))
slices = [ s[0] for s in ordered_slices['SlicesShort'] ]
if orth_series is False:
_log.error('cannot retrieve series')
return []
series_dict = {
'orthanc_id': orth_series['ID'],
'instances': len(orth_series['Instances']),
'instances': slices,
'modality': None,
'date': None,
'time': None,
'description': None,
'body_part': None,
'protocol': None
'protocol': None,
'performed_procedure_step_description': None
}
try:
series_dict['modality'] = orth_series['MainDicomTags']['Modality'].strip()
......@@ -786,16 +881,21 @@ class cOrthancServer:
series_dict['protocol'] = orth_series['MainDicomTags']['ProtocolName'].strip()
except KeyError:
pass
try:
series_dict['performed_procedure_step_description'] = orth_series['MainDicomTags']['PerformedProcedureStepDescription'].strip()
except KeyError:
pass
for key in series_dict:
if series_dict[key] == u'unknown':
series_dict[key] = None
if series_dict[key] == u'(null)':
series_dict[key] = None
if series_dict[key] == u'':
if series_dict[key] in [u'unknown', u'(null)', u'']:
series_dict[key] = None
if series_dict['description'] == series_dict['protocol']:
_log.debug('<series description> matches <series protocol>, ignoring protocol')
series_dict['protocol'] = None
if series_dict['performed_procedure_step_description'] in [series_dict['description'], series_dict['protocol']]:
series_dict['performed_procedure_step_description'] = None
if series_dict['performed_procedure_step_description'] is not None:
if regex.match ('[.,/\|\-\s\d]+', series_dict['performed_procedure_step_description'], flags = regex.UNICODE):
series_dict['performed_procedure_step_description'] = None
if series_dict['date'] == study_dict['date']:
_log.debug('<series date> matches <study date>, ignoring date')
series_dict['date'] = None
......@@ -857,8 +957,8 @@ class cOrthancServer:
return content
#--------------------------------------------------------
def __run_POST(self, url=None, data=None, content_type=u''):
if isinstance(data, str):
def __run_POST(self, url=None, data=None, content_type=u'', output_file=None):
if isinstance(data, basestring):
body = data
if len(content_type) != 0:
headers = { 'content-type' : content_type }
......@@ -888,19 +988,28 @@ class cOrthancServer:
if response.status == 404:
_log.debug(u'no data, response: %s', response)
return []
if output_file is None:
return []
return False
if not (response.status in [ 200, 302 ]):
_log.error(u'cannot POST: %s', url)
_log.error(u'response: %s', response)
return False
#_log.debug(u'response: %s', response)
try:
return json.loads(content)
content = json.loads(content)
# return json.loads(content)
except StandardError:
pass
# return content
if output_file is None:
return content
output_file.write(content)
return True
#--------------------------------------------------------
def __run_PUT(self, url=None, data=None, content_type=u''):
if isinstance(data, str):
if isinstance(data, basestring):
body = data
if len(content_type) != 0:
headers = { 'content-type' : content_type }
......@@ -1007,23 +1116,28 @@ if __name__ == "__main__":
pats = orthanc.get_patients_by_name(name_parts = entered_name.split(), fuzzy = True)
for pat in pats:
print(pat)
bad_data = orthanc.verify_patient_data(pat['ID'])
for bad in bad_data:
print(bad)
continue
continue
pats = orthanc.get_studies_list_by_patient_name(name_parts = entered_name.split(), fuzzy = True)
for pat in pats:
print(pat['name'])
for study in pat['studies']:
print(u' ', gmTools.format_dict_like(study, relevant_keys = ['orthanc_id', 'date', 'time'], template = u'study [%%(orthanc_id)s] at %%(date)s %%(time)s contains %s series' % len(study['series'])))
for series in study['series']:
print (
u' ',
gmTools.format_dict_like (
series,
relevant_keys = ['orthanc_id', 'date', 'time', 'modality', 'instances', 'body_part', 'protocol', 'description', 'station'],
template = u'series [%(orthanc_id)s] at %(date)s %(time)s: "%(description)s" %(modality)s@%(station)s (%(protocol)s) of body part "%(body_part)s" holds %(instances)s images'
)
)
#print(orthanc.get_study_as_zip_with_dicomdir(study_id = study['orthanc_id'], filename = 'study_%s.zip' % study['orthanc_id']))
# for series in study['series']:
# print (
# u' ',
# gmTools.format_dict_like (
# series,
# relevant_keys = ['orthanc_id', 'date', 'time', 'modality', 'instances', 'body_part', 'protocol', 'description', 'station'],
# template = u'series [%(orthanc_id)s] at %(date)s %(time)s: "%(description)s" %(modality)s@%(station)s (%(protocol)s) of body part "%(body_part)s" holds images:\n%(instances)s'
# )
# )
print(orthanc.get_studies_with_dicomdir(study_ids = [study['orthanc_id']], filename = 'study_%s.zip' % study['orthanc_id'], create_zip = True))
#print(orthanc.get_study_as_zip(study_id = study['orthanc_id'], filename = 'study_%s.zip' % study['orthanc_id']))
#print(orthanc.get_studies_as_zip_with_dicomdir(study_ids = [ s['orthanc_id'] for s in pat['studies'] ], filename = 'studies_of_%s.zip' % pat['orthanc_id']))
print(u'--------')
......@@ -1032,11 +1146,15 @@ if __name__ == "__main__":
def run_console():
try:
host = sys.argv[2]
port = sys.argv[3]
except IndexError:
host = None
try:
port = sys.argv[3]
except IndexError:
port = '8042'
orthanc_console(host, port)
#--------------------------------------------------------
def test_modify_patient_id():
try:
......@@ -1100,6 +1218,27 @@ if __name__ == "__main__":
orthanc.upload_from_directory(directory = sys.argv[2], recursive = True, check_mime_type = False, ignore_other_files = True)
#--------------------------------------------------------
#run_console()
def test_get_instance_preview():
host = None
port = '8042'
orthanc = cOrthancServer()
if not orthanc.connect(host, port, user = None, password = None): #, expected_aet = 'another AET'
print('error connecting to server:', orthanc.connect_error)
return False
print('Connected to Orthanc server "%s" (AET [%s] - version [%s] - DB [%s])' % (
orthanc.server_identification['Name'],
orthanc.server_identification['DicomAet'],
orthanc.server_identification['Version'],
orthanc.server_identification['DatabaseVersion']
))
print('')
print(orthanc.get_instance_preview('f4f07d22-0d8265ef-112ea4e9-dc140e13-350c06d1'))
print(orthanc.get_instance('f4f07d22-0d8265ef-112ea4e9-dc140e13-350c06d1'))
#--------------------------------------------------------
run_console()
#test_modify_patient_id()
test_upload_files()
#test_upload_files()
#test_get_instance_preview()
......@@ -91,6 +91,7 @@ class cDocumentFolder:
return None
prescription = cDocument(aPK_obj = rows[0][0])
return prescription
#--------------------------------------------------------
def get_latest_mugshot(self):
cmd = u"SELECT pk_obj FROM blobs.v_latest_mugshot WHERE pk_patient = %s"
......@@ -101,6 +102,7 @@ class cDocumentFolder:
return cDocumentPart(aPK_obj = rows[0][0])
latest_mugshot = property(get_latest_mugshot, lambda x:x)
#--------------------------------------------------------
def get_mugshot_list(self, latest_only=True):
if latest_only:
......@@ -120,6 +122,7 @@ class cDocumentFolder:
"""
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_patient]}])
return rows
#--------------------------------------------------------
def get_doc_list(self, doc_type=None):
"""return flat list of document IDs"""
......@@ -151,13 +154,15 @@ class cDocumentFolder:
for row in rows:
doc_ids.append(row[0])
return doc_ids
#--------------------------------------------------------
def get_visual_progress_notes(self, episodes=None, encounter=None):
return self.get_documents (
doc_type = DOCUMENT_TYPE_VISUAL_PROGRESS_NOTE,
episodes = episodes,
pk_episodes = episodes,
encounter = encounter
)
#--------------------------------------------------------
def get_unsigned_documents(self):
args = {'pat': self.pk_patient}
......@@ -173,8 +178,9 @@ class cDocumentFolder:
ORDER BY clin_when DESC"""
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
return [ cDocument(row = {'pk_field': 'pk_doc', 'idx': idx, 'data': r}) for r in rows ]
#--------------------------------------------------------
def get_documents(self, doc_type=None, episodes=None, encounter=None, order_by=None, exclude_unsigned=False):
def get_documents(self, doc_type=None, pk_episodes=None, encounter=None, order_by=None, exclude_unsigned=False, pk_types=None):
"""Return list of documents."""
args = {
......@@ -191,9 +197,13 @@ class cDocumentFolder:
except (TypeError, ValueError):
where_parts.append(u'pk_type = (SELECT pk FROM blobs.doc_type WHERE name = %(type)s)')
if (episodes is not None) and (len(episodes) > 0):
where_parts.append(u'pk_episode IN %(epi)s')
args['epi'] = tuple(episodes)
if pk_types is not None:
where_parts.append(u'pk_type IN %(pk_types)s')
args['pk_types'] = tuple(pk_types)
if (pk_episodes is not None) and (len(pk_episodes) > 0):
where_parts.append(u'pk_episode IN %(epis)s')
args['epis'] = tuple(pk_episodes)
if encounter is not None:
where_parts.append(u'pk_encounter = %(enc)s')
......@@ -266,7 +276,7 @@ class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
#--------------------------------------------------------
# retrieve data
#--------------------------------------------------------
def save_to_file(self, aChunkSize=0, filename=None, target_mime=None, target_extension=None, ignore_conversion_problems=False, directory=None):
def save_to_file(self, aChunkSize=0, filename=None, target_mime=None, target_extension=None, ignore_conversion_problems=False, directory=None, adjust_extension=False, conn=None):
if self._payload[self._idx['size']] == 0:
return None
......@@ -281,13 +291,16 @@ class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
},
filename = filename,
chunk_size = aChunkSize,
data_size = self._payload[self._idx['size']]
data_size = self._payload[self._idx['size']],
conn = conn
)
if not success:
return None
if target_mime is None:
if filename.endswith(u'.dat'):
if adjust_extension:
return gmMimeLib.adjust_extension_by_mimetype(filename)
return filename
if target_extension is None:
......@@ -296,7 +309,7 @@ class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
target_path, name = os.path.split(filename)
name, tmp = os.path.splitext(name)
target_fname = gmTools.get_unique_filename (
prefix = '%s-converted-' % name,
prefix = '%s-conv-' % name,
suffix = target_extension
)
_log.debug('attempting conversion: [%s] -> [<%s>:%s]', filename, target_mime, target_fname)
......@@ -311,6 +324,9 @@ class cDocumentPart(gmBusinessDBObject.cBusinessDBObject):
if not ignore_conversion_problems:
return None
if filename.endswith(u'.dat'):
if adjust_extension:
filename = gmMimeLib.adjust_extension_by_mimetype(filename)
_log.warning('programmed to ignore conversion problems, hoping receiver can handle [%s]', filename)
return filename
......@@ -416,6 +432,7 @@ insert into blobs.reviewed_doc_objs (
rows, idx = gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': args}])
return True
#--------------------------------------------------------
def set_as_active_photograph(self):
if self._payload[self._idx['type']] != u'patient photograph':
......@@ -431,6 +448,7 @@ insert into blobs.reviewed_doc_objs (
self._payload[self._idx['seq_idx']] = rows[0][0]
self._is_modified = True
self.save_payload()
#--------------------------------------------------------
def reattach(self, pk_doc=None):
if pk_doc == self._payload[self._idx['pk_doc']]:
......@@ -480,6 +498,7 @@ insert into blobs.reviewed_doc_objs (
return False, msg
return True, ''
#--------------------------------------------------------
def format_single_line(self):
f_ext = u''
......@@ -550,8 +569,9 @@ insert into blobs.reviewed_doc_objs (
# preserve original filename extension if available
suffix = '.dat'
if self._payload[self._idx['filename']] is not None:
tmp, suffix = os.path.splitext(self._payload[self._idx['filename']])
suffix = suffix.strip().replace(' ', '-').lower()
tmp, suffix = os.path.splitext (
gmTools.fname_sanitize(self._payload[self._idx['filename']]).lower()
)
if suffix == u'':
suffix = '.dat'
......@@ -580,12 +600,12 @@ insert into blobs.reviewed_doc_objs (
if make_unique:
fname = gmTools.get_unique_filename (
prefix = '%s-' % fname,
prefix = '%s-' % gmTools.fname_sanitize(fname),
suffix = suffix,
tmp_dir = directory
)
else:
fname = os.path.join(gmTools.coalesce(directory, u''), fname + suffix)
fname = gmTools.fname_sanitize(os.path.join(gmTools.coalesce(directory, u''), fname + suffix))
return fname
......@@ -601,38 +621,29 @@ def delete_document_part(part_pk=None, encounter_pk=None):
return
#============================================================
_sql_fetch_document_fields = u"""
SELECT
*,
COALESCE (
(SELECT array_agg(seq_idx) FROM blobs.doc_obj b_do WHERE b_do.fk_doc = b_vdm.pk_doc),
ARRAY[]::integer[]
)
AS seq_idx_list
FROM
blobs.v_doc_med b_vdm
WHERE
%s"""
_sql_fetch_document_fields = u"SELECT * FROM blobs.v_doc_med b_vdm WHERE %s"
class cDocument(gmBusinessDBObject.cBusinessDBObject):
"""Represents one medical document."""
_cmd_fetch_payload = _sql_fetch_document_fields % u"pk_doc = %s"
_cmds_store_payload = [
u"""update blobs.doc_med set
u"""UPDATE blobs.doc_med SET
fk_type = %(pk_type)s,
fk_episode = %(pk_episode)s,
fk_encounter = %(pk_encounter)s,
fk_org_unit = %(pk_org_unit)s,
unit_is_receiver = %(unit_is_receiver)s,
clin_when = %(clin_when)s,
comment = gm.nullify_empty_string(%(comment)s),
ext_ref = gm.nullify_empty_string(%(ext_ref)s)
where
ext_ref = gm.nullify_empty_string(%(ext_ref)s),
fk_hospital_stay = %(pk_hospital_stay)s
WHERE
pk = %(pk_doc)s and
xmin = %(xmin_doc_med)s""",
u"""select xmin_doc_med from blobs.v_doc_med where pk_doc = %(pk_doc)s"""
]
xmin = %(xmin_doc_med)s
RETURNING
xmin AS xmin_doc_med"""
]
_updatable_fields = [
'pk_type',
'comment',
......@@ -640,14 +651,18 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
'ext_ref',
'pk_episode',
'pk_encounter', # mainly useful when moving visual progress notes to their respective encounters
'pk_org_unit'
'pk_org_unit',
'unit_is_receiver',
'pk_hospital_stay'
]
#--------------------------------------------------------
def refetch_payload(self, ignore_changes=False, link_obj=None):
try: del self.__has_unreviewed_parts
except AttributeError: pass
return super(cDocument, self).refetch_payload(ignore_changes = ignore_changes, link_obj = link_obj)
#--------------------------------------------------------
def get_descriptions(self, max_lng=250):
"""Get document descriptions.
......@@ -660,11 +675,13 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
cmd = u"SELECT pk, substring(text from 1 for %s) FROM blobs.doc_desc WHERE fk_doc=%%s" % max_lng
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj]}])
return rows
#--------------------------------------------------------
def add_description(self, description=None):
cmd = u"insert into blobs.doc_desc (fk_doc, text) values (%s, %s)"
gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': [self.pk_obj, description]}])
return True
#--------------------------------------------------------
def update_description(self, pk=None, description=None):
cmd = u"update blobs.doc_desc set text = %(desc)s where fk_doc = %(doc)s and pk = %(pk_desc)s"
......@@ -672,11 +689,13 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
{'cmd': cmd, 'args': {'doc': self.pk_obj, 'pk_desc': pk, 'desc': description}}
])
return True
#--------------------------------------------------------
def delete_description(self, pk=None):
cmd = u"delete from blobs.doc_desc where fk_doc = %(doc)s and pk = %(desc)s"
gmPG2.run_rw_queries(queries = [{'cmd': cmd, 'args': {'doc': self.pk_obj, 'desc': pk}}])
return True
#--------------------------------------------------------
def _get_parts(self):
cmd = _sql_fetch_document_part_fields % u"pk_doc = %s ORDER BY seq_idx"
......@@ -684,6 +703,7 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
return [ cDocumentPart(row = {'pk_field': 'pk_obj', 'idx': idx, 'data': r}) for r in rows ]
parts = property(_get_parts, lambda x:x)
#--------------------------------------------------------
def add_part(self, file=None, link_obj=None):
"""Add a part to the document."""
......@@ -715,6 +735,7 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
new_part.save_payload(conn = link_obj)
return new_part
#--------------------------------------------------------
def add_parts_from_files(self, files=None, reviewer=None):
......@@ -738,16 +759,21 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
return (False, msg, filename)
return (True, '', new_parts)
#--------------------------------------------------------
def save_parts_to_files(self, export_dir=None, chunksize=0):
def save_parts_to_files(self, export_dir=None, chunksize=0, conn=None):
fnames = []
for part in self.parts:
fname = part.save_to_file(aChunkSize = chunksize)
if export_dir is not None:
shutil.move(fname, export_dir)
fname = os.path.join(export_dir, os.path.split(fname)[1])
fname = part.save_to_file(aChunkSize = chunksize, directory = export_dir, conn = conn)
# if export_dir is not None:
# shutil.move(fname, export_dir)
# fname = os.path.join(export_dir, os.path.split(fname)[1])
if fname is None:
_log.error(u'cannot export document part [%s]', part)
continue
fnames.append(fname)
return fnames
#--------------------------------------------------------
def _get_has_unreviewed_parts(self):
try:
......@@ -832,6 +858,27 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
parts = _('1 part')
else:
parts = _('%s parts') % part_count
org = u''
if self._payload[self._idx['unit']] is not None:
if self._payload[self._idx['unit_is_receiver']]:
org = _(' Receiver: %s @ %s\n') % (
self._payload[self._idx['unit']],
self._payload[self._idx['organization']]
)
else:
org = _(' Sender: %s @ %s\n') % (
self._payload[self._idx['unit']],
self._payload[self._idx['organization']]
)
stay = u''
if self._payload[self._idx['pk_hospital_stay']] is not None:
stay = _(u'Hospital stay') + u': %s\n' % self.hospital_stay.format (
left_margin = 0,
include_procedures = False,
include_docs = False,
include_episode = False
)
txt = _(
'%s (%s) #%s\n'
' Created: %s\n'
......@@ -840,6 +887,7 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
'%s'
'%s'
'%s'
'%s'
) % (
self._payload[self._idx['l10n_type']],
parts,
......@@ -848,17 +896,44 @@ class cDocument(gmBusinessDBObject.cBusinessDBObject):
self._payload[self._idx['episode']],
gmTools.coalesce(self._payload[self._idx['health_issue']], u'', _(' Health issue: %s\n')),
gmTools.coalesce(self._payload[self._idx['ext_ref']], u'', _(' External reference: %s\n')),
gmTools.coalesce(self._payload[self._idx['unit']], u'', _(' Organization: %%s @ %s\n') % self._payload[self._idx['organization']]),
org,
stay,
gmTools.coalesce(self._payload[self._idx['comment']], u'', u' %s')
)
return txt
#--------------------------------------------------------
def _get_hospital_stay(self):
if self._payload[self._idx['pk_hospital_stay']] is None:
return None
from Gnumed.business import gmEMRStructItems
return gmEMRStructItems.cHospitalStay(self._payload[self._idx['pk_hospital_stay']])
hospital_stay = property(_get_hospital_stay, lambda x:x)
#--------------------------------------------------------
def _get_org_unit(self):
if self._payload[self._idx['pk_org_unit']] is None:
return None
return gmOrganization.cOrgUnit(self._payload[self._idx['pk_org_unit']])
org_unit = property(_get_org_unit, lambda x:x)
#--------------------------------------------------------
def _get_procedures(self):
from gmEMRStructItems import get_procedures4document
return get_procedures4document(pk_document = self.pk_obj)
procedures = property(_get_procedures, lambda x:x)
#--------------------------------------------------------
def _get_bills(self):
from gmBilling import get_bills4document
return get_bills4document(pk_document = self.pk_obj)
bills = property(_get_bills, lambda x:x)
#------------------------------------------------------------
def create_document(document_type=None, encounter=None, episode=None, link_obj=None):
"""Returns new document instance or raises an exception."""
......@@ -886,13 +961,22 @@ def create_document(document_type=None, encounter=None, episode=None, link_obj=N
return doc
#------------------------------------------------------------
def search_for_documents(patient_id=None, type_id=None, external_reference=None):
def search_for_documents(patient_id=None, type_id=None, external_reference=None, pk_episode=None, pk_types=None):
"""Searches for documents with the given patient and type ID."""
if patient_id is None:
raise ValueError('need patient id to search for document')
args = {'pat_id': patient_id, 'type_id': type_id, 'ref': external_reference}
where_parts = [u'pk_patient = %(pat_id)s']
if (patient_id is None) and (pk_episode is None):
raise ValueError('need patient_id or pk_episode to search for document')
where_parts = []
args = {
'pat_id': patient_id,
'type_id': type_id,
'ref': external_reference,
'pk_epi': pk_episode
}
if patient_id is not None:
where_parts.append(u'pk_patient = %(pat_id)s')
if type_id is not None:
where_parts.append(u'pk_type = %(type_id)s')
......@@ -900,6 +984,13 @@ def search_for_documents(patient_id=None, type_id=None, external_reference=None)
if external_reference is not None:
where_parts.append(u'ext_ref = %(ref)s')
if pk_episode is not None:
where_parts.append(u'pk_episode = %(pk_epi)s')
if pk_types is not None:
where_parts.append(u'pk_type IN %(pk_types)s')
args['pk_types'] = tuple(pk_types)
cmd = _sql_fetch_document_fields % u' AND '.join(where_parts)
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = True)
return [ cDocument(row = {'data': r, 'idx': idx, 'pk_field': 'pk_doc'}) for r in rows ]
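A brief sketch of the widened search above: documents can now be looked up by episode alone and narrowed to a set of type primary keys, here obtained through the map_types2pk() helper added a little further down. The pk values and the type name are placeholders; the module name gmDocuments matches the import used elsewhere in this changeset.

# sketch only: find all documents of one type within one episode
import sys
sys.path.insert(0, '../../')
from Gnumed.business import gmDocuments

rows = gmDocuments.map_types2pk(document_types = [u'discharge summary'])   # placeholder type name
pk_types = [ r[0] for r in rows ]                                          # first column is pk_doc_type
docs = gmDocuments.search_for_documents (
    pk_episode = 7,              # placeholder; patient_id may now be omitted
    pk_types = pk_types
)
for doc in docs:
    print(u'#%s: %s' % (doc['pk_doc'], doc['l10n_type']))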
......@@ -976,6 +1067,7 @@ class cDocumentType(gmBusinessDBObject.cBusinessDBObject):
return False
return self.refetch_payload()
#------------------------------------------------------------
def get_document_types():
rows, idx = gmPG2.run_ro_queries (
......@@ -987,6 +1079,7 @@ def get_document_types():
row_def = {'pk_field': 'pk_doc_type', 'idx': idx, 'data': row}
doc_types.append(cDocumentType(row = row_def))
return doc_types
#------------------------------------------------------------
def get_document_type_pk(document_type=None):
args = {'typ': document_type.strip()}
......@@ -1001,6 +1094,14 @@ def get_document_type_pk(document_type=None):
return None
return rows[0]['pk']
#------------------------------------------------------------
def map_types2pk(document_types=None):
args = {'types': tuple(document_types)}
cmd = u'SELECT pk_doc_type, coalesce(l10n_type, type) as desc FROM blobs.v_doc_type WHERE l10n_type IN %(types)s OR type IN %(types)s'
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
return rows
#------------------------------------------------------------
def create_document_type(document_type=None):
# check for potential dupes:
......@@ -1016,6 +1117,7 @@ def create_document_type(document_type=None):
return_data = True
)
return cDocumentType(aPK_obj = rows[0][0])
#------------------------------------------------------------
def delete_document_type(document_type=None):
if document_type['is_in_use']:
......@@ -1027,6 +1129,7 @@ def delete_document_type(document_type=None):
}]
)
return True
#------------------------------------------------------------
def get_ext_ref():
"""This needs *considerably* more smarts."""
......@@ -1095,6 +1198,7 @@ if __name__ == '__main__':
return
#--------------------------------------------------------
def test_get_documents():
doc_folder = cDocumentFolder(aPKey=12)
#photo = doc_folder.get_latest_mugshot()
......@@ -1133,7 +1237,7 @@ if __name__ == '__main__':
#test_doc_types()
#test_adding_doc_part()
#test_get_documents()
test_get_useful_filename()
test_get_documents()
#test_get_useful_filename()
# print get_ext_ref()
......@@ -30,6 +30,7 @@ from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmCfg2
from Gnumed.business import gmDocuments
from Gnumed.business import gmKeywordExpansion
_log = logging.getLogger('gm.exp_area')
......@@ -47,10 +48,7 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
_cmd_fetch_payload = _SQL_get_export_items % u"pk_export_item = %s"
_cmds_store_payload = [
u"""UPDATE clin.export_item SET
fk_identity = CASE
WHEN %(pk_doc_obj)s IS NULL THEN %(pk_identity)s
ELSE NULL
END,
fk_identity = %(pk_identity)s,
created_by = gm.nullify_empty_string(%(created_by)s),
created_when = %(created_when)s,
designation = gm.nullify_empty_string(%(designation)s),
......@@ -80,6 +78,27 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
u'filename'
]
#--------------------------------------------------------
def __init__(self, aPK_obj=None, row=None, link_obj=None):
super(cExportItem, self).__init__(aPK_obj = aPK_obj, row = row, link_obj = link_obj)
# force auto-healing if need be
if self._payload[self._idx['pk_identity_raw_needs_update']]:
_log.warning (
u'auto-healing export item [%s] from identity [%s] to [%s] because of document part [%s] seems necessary',
self._payload[self._idx['pk_export_item']],
self._payload[self._idx['pk_identity_raw']],
self._payload[self._idx['pk_identity']],
self._payload[self._idx['pk_doc_obj']]
)
if self._payload[self._idx['pk_doc_obj']] is None:
_log.error(u'however, .fk_doc_obj is NULL, which should not happen, leaving things alone for manual inspection')
return
# only flag ourselves as modified, do not actually
# modify any values, better safe than sorry
self._is_modified = True
self.save()
self.refetch_payload(ignore_changes = False, link_obj = link_obj)
#--------------------------------------------------------
# def format(self):
# return u'%s' % self
#--------------------------------------------------------
......@@ -120,8 +139,8 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
return part.save_to_file (
aChunkSize = aChunkSize,
filename = filename,
ignore_conversion_problems = True
#, directory = directory
ignore_conversion_problems = True,
adjust_extension = True
)
# data in export area table
......@@ -137,10 +156,12 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
chunk_size = aChunkSize,
data_size = self._payload[self._idx['size']]
)
if not success:
return None
if filename.endswith(u'.dat'):
return gmMimeLib.adjust_extension_by_mimetype(filename)
return filename
#--------------------------------------------------------
......@@ -168,8 +189,9 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
# preserve original filename extension if available
suffix = '.dat'
if self._payload[self._idx['filename']] is not None:
tmp, suffix = os.path.splitext(self._payload[self._idx['filename']])
suffix = suffix.strip().replace(' ', '-').lower()
tmp, suffix = os.path.splitext (
gmTools.fname_sanitize(self._payload[self._idx['filename']]).lower()
)
if suffix == u'':
suffix = '.dat'
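# A minimal standalone illustration of the suffix fallback above (filenames
# are hypothetical; gmTools.fname_sanitize() itself is GNUmed-specific):
#
#   import os.path
#   os.path.splitext('Final Report.PDF'.lower())   # -> ('final report', '.pdf')
#   os.path.splitext('README'.lower())             # -> ('readme', '') -> suffix becomes '.dat'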
......@@ -190,6 +212,7 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
return gmDocuments.cDocumentPart(aPK_obj = self._payload[self._idx['pk_doc_obj']])
document_part = property(_get_doc_part, lambda x:x)
#--------------------------------------------------------
def _get_is_print_job(self):
return self._payload[self._idx['designation']] == PRINT_JOB_DESIGNATION
......@@ -202,6 +225,7 @@ class cExportItem(gmBusinessDBObject.cBusinessDBObject):
self.save()
is_print_job = property(_get_is_print_job, _set_is_print_job)
#------------------------------------------------------------
def get_export_items(order_by=None, pk_identity=None, designation=None):
......@@ -212,6 +236,8 @@ def get_export_items(order_by=None, pk_identity=None, designation=None):
where_parts = []
if pk_identity is not None:
where_parts.append(u'pk_identity = %(pat)s')
# note that invalidly linked items will be
# auto-healed when instantiated
if designation is None:
where_parts.append(u"designation IS DISTINCT FROM %(desig)s")
else:
......@@ -250,10 +276,7 @@ def create_export_item(description=None, pk_identity=None, pk_doc_obj=None, file
) VALUES (
gm.nullify_empty_string(%(desc)s),
%(pk_obj)s,
(CASE
WHEN %(pk_obj)s IS NULL THEN %(pk_pat)s
ELSE NULL::integer
END),
%(pk_pat)s,
(CASE
WHEN %(pk_obj)s IS NULL THEN %(fname)s::bytea
ELSE NULL::bytea
......@@ -282,6 +305,7 @@ _html_start = u"""<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<link rel="icon" type="image/x-icon" href="gnumed.ico">
<title>%(html_title_header)s %(html_title_patient)s</title>
</head>
<body>
......@@ -301,6 +325,7 @@ _html_start = u"""<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
<li><a href="./">%(browse_root)s</a></li>
<li><a href="documents/">%(browse_docs)s</a></li>
%(browse_dicomdir)s
%(run_dicom_viewer)s
</ul>
<ul>
......@@ -327,11 +352,20 @@ _html_end = u"""
"""
_autorun_inf = (
u'[AutoRun]\r\n' # needs \r\n for Windows
u'label=%s\r\n' # patient name/DOB
_autorun_inf = ( # needs \r\n for Windows
u'[AutoRun.Amd64]\r\n' # 64 bit
u'label=%(label)s\r\n' # patient name/DOB
u'shellexecute=index.html\r\n'
u'action=%(action)s\r\n' # % _('Browse patient data')
u'%(icon)s\r\n' # "icon=gnumed.ico" or ""
u'UseAutoPlay=1\r\n'
u'\r\n'
u'[AutoRun]\r\n' # 32 bit
u'label=%(label)s\r\n' # patient name/DOB
u'shellexecute=index.html\r\n'
u'action=%s\r\n' # % _('Browse patient data')
u'action=%(action)s\r\n' # % _('Browse patient data')
u'%(icon)s\r\n' # "icon=gnumed.ico" or ""
u'UseAutoPlay=1\r\n'
u'\r\n'
u'[Content]\r\n'
u'PictureFiles=yes\r\n'
......@@ -343,7 +377,6 @@ _autorun_inf = (
u'\r\n'
u'[unused]\r\n'
u'open=requires explicit executable\r\n'
u'icon=use standard icon for storage unit\r\n'
)
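# Hedged sketch of how the template above gets rendered further down in
# cExportArea.export() -- the values shown here are hypothetical, the real
# ones come from the patient record and the keyword expansion:
#
#   import io
#   autorun_dict = {
#       'label': u'MUSTERMANN Max 19700401 m',   # hypothetical, max 32 chars
#       'action': u'Browse patient data',
#       'icon': u'icon=gnumed.ico'               # or u'' if no icon could be exported
#   }
#   autorun_file = io.open('autorun.inf', mode = 'wt', encoding = 'cp1252', errors = 'replace')
#   autorun_file.write(_autorun_inf % autorun_dict)
#   autorun_file.close()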
......@@ -356,6 +389,8 @@ u'CreationDate=%s\r\n'
u'PID=%s\r\n'
u'EMR=GNUmed\r\n'
u'Version=%s\r\n'
u'#StudyDate=\r\n'
u'#VNRInfo=<body part>\r\n'
u'\r\n'
u'# name format: lastnames, firstnames\r\n'
u'# date format: YYYY-MM-DD (ISO 8601)\r\n'
......@@ -502,7 +537,26 @@ class cExportArea(object):
return cExportItem(row = {'data': r, 'idx': idx, 'pk_field': 'pk_export_item'})
#--------------------------------------------------------
def export(self, base_dir=None, items=None, with_metadata=True, expand_compressed=False):
def dump_items_to_disk(self, base_dir=None, items=None):
if items is None:
items = self.items
if len(items) == 0:
return None
if base_dir is None:
from Gnumed.business.gmPerson import cPatient
pat = cPatient(aPK_obj = self.__pk_identity)
base_dir = gmTools.mk_sandbox_dir(prefix = u'exp-%s-' % pat.dirname)
_log.debug('dumping export items to: %s', base_dir)
gmTools.mkdir(base_dir)
for item in items:
item.save_to_file(directory = base_dir)
return base_dir
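# Hedged usage sketch -- assumes a reachable GNUmed database and, as in the
# test code at the bottom of this module, a patient #12 with export items:
#
#   exp_area = cExportArea(12)
#   dump_dir = exp_area.dump_items_to_disk()                            # sandbox dir created automatically
#   dump_dir = exp_area.dump_items_to_disk(base_dir = '/tmp/gm-dump')   # or a (hypothetical) explicit target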
#--------------------------------------------------------
def export(self, base_dir=None, items=None, expand_compressed=False):
if items is None:
items = self.items
......@@ -510,14 +564,20 @@ class cExportArea(object):
if len(items) == 0:
return None
media_base_dir = base_dir
from Gnumed.business.gmPerson import cPatient
pat = cPatient(aPK_obj = self.__pk_identity)
if base_dir is None:
base_dir = gmTools.mk_sandbox_dir(prefix = u'exp-%s-' % pat.dirname)
_log.debug('base dir: %s', base_dir)
if media_base_dir is None:
media_base_dir = gmTools.mk_sandbox_dir(prefix = u'exp-%s-' % pat.dirname)
_log.debug('patient media base dir: %s', media_base_dir)
doc_dir = os.path.join(base_dir, r'documents')
gmTools.mkdir(doc_dir)
doc_dir = os.path.join(media_base_dir, r'documents')
if os.path.isdir(doc_dir):
index_existing_docs = True
else:
index_existing_docs = False
gmTools.mkdir(doc_dir)
_html_start_data = {
u'html_title_header': _('Patient data for'),
......@@ -531,24 +591,38 @@ class cExportArea(object):
u'docs_title': _(u'Documents'),
u'browse_root': _(u'browse storage medium'),
u'browse_docs': _(u'browse documents area'),
u'browse_dicomdir': u''
u'browse_dicomdir': u'',
u'run_dicom_viewer': u''
}
mugshot = pat.document_folder.latest_mugshot
if mugshot is not None:
_html_start_data['mugshot_url'] = mugshot.save_to_file(directory = doc_dir)
_html_start_data['mugshot_url'] = mugshot.save_to_file(directory = doc_dir, adjust_extension = True)
_html_start_data['mugshot_alt'] =_('patient photograph from %s') % gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
_html_start_data['mugshot_title'] = gmDateTime.pydt_strftime(mugshot['date_generated'], '%B %Y')
if u'DICOMDIR' in os.listdir(media_base_dir):
_html_start_data[u'browse_dicomdir'] = u'<li><a href="./DICOMDIR">%s</a></li>' % _(u'show DICOMDIR file')
# copy DWV into target dir
dwv_target_dir = os.path.join(media_base_dir, u'dwv')
gmTools.rmdir(dwv_target_dir)
dwv_src_dir = os.path.join(gmTools.gmPaths().local_base_dir, u'dwv4export')
if not os.path.isdir(dwv_src_dir):
dwv_src_dir = os.path.join(gmTools.gmPaths().system_app_data_dir, u'dwv4export')
try:
shutil.copytree(dwv_src_dir, dwv_target_dir)
_html_start_data[u'run_dicom_viewer'] = u'<li><a href="./dwv/viewers/mobile-local/index.html">%s</a></li>' % _(u'run Radiology Images (DICOM) Viewer')
except (shutil.Error, OSError):
_log.exception('cannot include DWV, skipping')
# index.html
idx_fname = os.path.join(base_dir, u'index.html')
# - header
idx_fname = os.path.join(media_base_dir, u'index.html')
idx_file = io.open(idx_fname, mode = u'wt', encoding = u'utf8')
# header
existing_files = os.listdir(base_dir)
if u'DICOMDIR' in existing_files:
_html_start_data[u'browse_dicomdir'] = u' <li><a href="./DICOMDIR">browse DICOMDIR</a></li>'
idx_file.write(_html_start % _html_start_data)
# middle (side effect ! -> exports items into files ...)
# - middle (side effect ! -> exports items into files ...)
existing_docs = os.listdir(doc_dir) # get them now, or else we will include the to-be-exported items
# - export items
for item in items:
item_path = item.save_to_file(directory = doc_dir)
item_fname = os.path.split(item_path)[1]
......@@ -556,7 +630,13 @@ class cExportArea(object):
item_fname,
gmTools.html_escape_string(item['description'])
))
# footer
# - preexisting documents
for doc_fname in existing_docs:
idx_file.write(_html_list_item % (
doc_fname,
gmTools.html_escape_string(_(u'other: %s') % doc_fname)
))
# - footer
_cfg = gmCfg2.gmCfgData()
from Gnumed.business.gmPraxis import gmCurrentPraxisBranch
prax = gmCurrentPraxisBranch()
......@@ -586,29 +666,44 @@ class cExportArea(object):
idx_file.close()
# start.html (just a copy of index.html, really ;-)
start_fname = os.path.join(base_dir, u'start.html')
start_fname = os.path.join(media_base_dir, u'start.html')
try:
shutil.copy2(idx_fname, start_fname)
except Exception:
_log.exception('cannot copy %s to %s', idx_fname, start_fname)
# autorun.inf
name = pat.active_name
last = name['lastnames'][:14]
first = name['firstnames'][:min(14, 18 - len(last))]
label = ((u'%s%s%s' % (
u'%s,%s' % (last, first),
gmTools.coalesce(pat['gender'], u'', u' (%s)'),
pat.get_formatted_dob(format = ' %Y%m%d', none_string = u'', honor_estimation = False)
)).strip())[:32] # max 32 chars, supposedly ASCII, but LATIN1 works pretty well
action = _('Browse patient data')
autorun_fname = os.path.join(base_dir, u'autorun.inf')
autorun_file = io.open(autorun_fname, mode = u'wt', encoding = u'utf8')
autorun_file.write(_autorun_inf % (label, action))
autorun_dict = {}
autorun_dict['label'] = self._compute_autorun_inf_label(pat)
autorun_dict['action'] = _('Browse patient data')
autorun_dict['icon'] = u''
media_icon_kwd = u'$$gnumed_patient_media_export_icon'
media_icon_kwd_exp = gmKeywordExpansion.get_expansion (
keyword = media_icon_kwd,
textual_only = False,
binary_only = True
)
icon_tmp_file = media_icon_kwd_exp.save_to_file (
target_mime = u'image/x-icon',
target_extension = u'.ico',
ignore_conversion_problems = True
)
if icon_tmp_file is None:
_log.debug(u'cannot retrieve <%s>', media_icon_kwd)
else:
media_icon_fname = os.path.join(media_base_dir, u'gnumed.ico')
try:
shutil.move(icon_tmp_file, media_icon_fname)
autorun_dict['icon'] = u'icon=gnumed.ico'
except Exception:
_log.exception('cannot move %s to %s', icon_tmp_file, media_icon_fname)
autorun_fname = os.path.join(media_base_dir, u'autorun.inf')
autorun_file = io.open(autorun_fname, mode = 'wt', encoding = 'cp1252', errors = 'replace')
autorun_file.write(_autorun_inf % autorun_dict)
autorun_file.close()
# cd.inf
cd_inf_fname = os.path.join(base_dir, u'cd.inf')
cd_inf_fname = os.path.join(media_base_dir, u'cd.inf')
cd_inf_file = io.open(cd_inf_fname, mode = u'wt', encoding = u'utf8')
cd_inf_file.write(_cd_inf % (
pat['lastnames'],
......@@ -623,7 +718,7 @@ class cExportArea(object):
cd_inf_file.close()
# README
readme_fname = os.path.join(base_dir, u'README')
readme_fname = os.path.join(media_base_dir, u'README')
readme_file = io.open(readme_fname, mode = u'wt', encoding = u'utf8')
readme_file.write(_README % (
pat.get_description_gender(with_nickname = False) + u', ' + _(u'born') + u' ' + pat.get_formatted_dob('%Y %B %d')
......@@ -631,14 +726,44 @@ class cExportArea(object):
readme_file.close()
# patient demographics as GDT/XML/VCF
pat.export_as_gdt(filename = os.path.join(base_dir, u'patient.gdt'))
pat.export_as_xml_linuxmednews(filename = os.path.join(base_dir, u'patient.xml'))
pat.export_as_vcard(filename = os.path.join(base_dir, u'patient.vcf'))
pat.export_as_gdt(filename = os.path.join(media_base_dir, u'patient.gdt'))
pat.export_as_xml_linuxmednews(filename = os.path.join(media_base_dir, u'patient.xml'))
pat.export_as_vcard(filename = os.path.join(media_base_dir, u'patient.vcf'))
# praxis VCF
shutil.move(prax.vcf, os.path.join(base_dir, u'praxis.vcf'))
shutil.move(prax.vcf, os.path.join(media_base_dir, u'praxis.vcf'))
return base_dir
return media_base_dir
#--------------------------------------------------------
def _compute_autorun_inf_label(self, patient):
LABEL_MAX_LEN = 32
dob = patient.get_formatted_dob(format = ' %Y%m%d', none_string = u'', honor_estimation = False)
if dob == u'':
gender_template = u' (%s)'
else:
gender_template = u' %s'
gender = gmTools.coalesce(patient['gender'], u'', gender_template)
name_max_len = LABEL_MAX_LEN - len(gender) - len(dob) # they already include appropriate padding
name = patient.active_name
last = name['lastnames'].strip()
first = name['firstnames'].strip()
len_last = len(last)
len_first = len(first)
while (len_last + len_first + 1) > name_max_len:
if len_first > 6:
len_first -= 1
if first[len_first - 1] == u' ':
len_first -= 1
continue
len_last -= 1
if last[len_last - 1] == u' ':
len_last -= 1
last = last[:len_last].strip().upper()
first = first[:len_first].strip()
# max 32 chars, supposedly ASCII, but CP1252 likely works pretty well
label = ((u'%s %s%s%s' % (last, first, dob, gender)).strip())[:32]
return label
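# Worked example of the length budget above (hypothetical patient data):
# dob = ' 19700401' (9 chars incl. leading space), gender = ' f' (2 chars)
# -> name_max_len = 32 - 2 - 9 = 21, so lastnames + ' ' + firstnames are
# trimmed (firstnames first, but not below 6 chars) until they fit into
# 21 chars, and the assembled label is cut to 32 chars at the end anyway.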
#--------------------------------------------------------
# properties
......@@ -681,6 +806,7 @@ if __name__ == '__main__':
item['pk_doc_obj'] = 1
item.save()
print item
#---------------------------------------
def test_export_area():
exp = cExportArea(12)
......@@ -690,7 +816,55 @@ if __name__ == '__main__':
prax = gmPraxis.gmCurrentPraxisBranch(branch = gmPraxis.cPraxisBranch(1))
print prax
print prax.branch
print exp.export(with_metadata = True)
print exp.export()
#---------------------------------------
def test_label():
from Gnumed.business.gmPerson import cPatient
from Gnumed.business.gmPersonSearch import ask_for_patient
#while ask_for_patient() is not None:
pat_min = 1
pat_max = 100
try:
pat_min = int(sys.argv[2])
pat_max = int(sys.argv[3])
except:
pass
cPatient(aPK_obj = pat_min)
f = io.open('x-auto_inf_labels.txt', mode = 'w', encoding = 'utf8')
f.write(u'--------------------------------\n')
f.write(u'12345678901234567890123456789012\n')
f.write(u'--------------------------------\n')
for pat_id in range(pat_min, pat_max):
try:
exp_area = cExportArea(pat_id)
pat = cPatient(aPK_obj = pat_id)
except:
continue
f.write(exp_area._compute_autorun_inf_label(pat) + u'\n')
f.close()
return
#---------------------------------------
#test_export_items()
test_export_area()
#test_export_area()
test_label()
sys.exit(0)
#============================================================
# CDROM "run.bat":
#
#@echo off
#
#if defined ProgramFiles(x86) (
# ::64-bit
# start /B x64\mdicom.exe /scan .
#) else (
# ::32-bit
# start /B win32\mdicom.exe /scan .
#)
#
#--------------------------------------------------
......@@ -33,7 +33,7 @@ _SQL_get_external_care_items = u"""SELECT * FROM clin.v_external_care WHERE %s""
class cExternalCareItem(gmBusinessDBObject.cBusinessDBObject):
"""Represents an external care item.
Note: Upon saving .issue being non-empty and not None will
Note: Upon saving, a .issue that is (non-empty AND not None) will
override .fk_health_issue (IOW, if your code wants to set
.fk_health_issue to something other than NULL it needs to
unset .issue explicitly (to u'' or None)).
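Hedged usage sketch (the primary key value is hypothetical):

    care_item['issue'] = None          # or u'', otherwise .issue wins
    care_item['pk_health_issue'] = 4   # hypothetical pk of an existing health issue
    care_item.save()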
......@@ -46,6 +46,7 @@ class cExternalCareItem(gmBusinessDBObject.cBusinessDBObject):
issue = gm.nullify_empty_string(%(issue)s),
provider = gm.nullify_empty_string(%(provider)s),
fk_org_unit = %(pk_org_unit)s,
inactive = %(inactive)s,
fk_health_issue = (
CASE
WHEN gm.is_null_or_blank_string(%(issue)s) IS TRUE THEN %(pk_health_issue)s
......@@ -67,12 +68,21 @@ class cExternalCareItem(gmBusinessDBObject.cBusinessDBObject):
u'pk_org_unit',
u'issue',
u'provider',
u'comment'
u'comment',
u'inactive'
]
#--------------------------------------------------------
def format(self, with_health_issue=True, with_address=False, with_comms=False):
lines = []
lines.append(_(u'External care #%s') % self._payload[self._idx['pk_external_care']])
lines.append(_(u'External care%s #%s') % (
gmTools.bool2subst (
self._payload[self._idx['inactive']],
u' (%s)' % _('inactive'),
u'',
u' [ERROR: .inactive is NULL]'
),
self._payload[self._idx['pk_external_care']]
))
if with_health_issue:
if self._payload[self._idx['pk_health_issue']] is None:
lines.append(u' ' + _(u'Issue: %s') % self._payload[self._idx['issue']])
......@@ -106,7 +116,7 @@ class cExternalCareItem(gmBusinessDBObject.cBusinessDBObject):
org_unit = property(_get_org_unit, lambda x:x)
#------------------------------------------------------------
def get_external_care_items(order_by=None, pk_identity=None, pk_health_issue=None):
def get_external_care_items(order_by=None, pk_identity=None, pk_health_issue=None, exclude_inactive=False):
args = {
'pk_pat': pk_identity,
......@@ -117,6 +127,8 @@ def get_external_care_items(order_by=None, pk_identity=None, pk_health_issue=Non
where_parts.append(u'pk_identity = %(pk_pat)s')
if pk_health_issue is not None:
where_parts.append(u'pk_health_issue = %(pk_issue)s')
if exclude_inactive is True:
where_parts.append(u'inactive IS FALSE')
if len(where_parts) == 0:
where = u'TRUE'
......
......@@ -192,6 +192,7 @@ class cFormTemplate(gmBusinessDBObject.cBusinessDBObject):
return rows[0][0]
template_data = property(_get_template_data, lambda x:x)
#--------------------------------------------------------
def save_to_file(self, filename=None, chunksize=0):
"""Export form template from database into file."""
......@@ -229,6 +230,7 @@ class cFormTemplate(gmBusinessDBObject.cBusinessDBObject):
return None
return filename
#--------------------------------------------------------
def update_template_from_file(self, filename=None):
gmPG2.file2bytea (
......@@ -238,6 +240,7 @@ class cFormTemplate(gmBusinessDBObject.cBusinessDBObject):
)
# adjust for xmin change
self.refetch_payload()
#--------------------------------------------------------
def instantiate(self):
fname = self.save_to_file()
......@@ -554,6 +557,7 @@ class gmOOoConnector(gmBorg.cBorg):
return self.__desktop
desktop = property(_get_desktop, lambda x:x)
#------------------------------------------------------------
class cOOoLetter(object):
......@@ -718,7 +722,7 @@ class cFormEngine(object):
# # some forms may not have values ...
# if params is None:
# params = {}
# patient_clinical = self.patient.get_emr()
# patient_clinical = self.patient.emr
# encounter = patient_clinical.active_encounter['pk_encounter']
# # FIXME: get_active_episode is no more
# #episode = patient_clinical.get_active_episode()['pk_episode']
......@@ -1169,6 +1173,8 @@ class cLaTeXForm(cFormEngine):
mimetypes = [
u'application/x-latex',
u'application/x-tex',
u'text/latex',
u'text/tex',
u'text/plain'
]
......
......@@ -1174,7 +1174,7 @@ def __import_single_PID_hl7_file(filename, emr=None):
folder = gmPerson.cPatient(emr.pk_patient).document_folder
hl7_docs = folder.get_documents (
doc_type = u'HL7 data',
episodes = [epi['pk_episode']],
pk_episodes = [epi['pk_episode']],
order_by = u'ORDER BY clin_when DESC'
)
if len(hl7_docs) > 0:
......
......@@ -106,6 +106,7 @@ class cKeywordExpansion(gmBusinessDBObject.cBusinessDBObject):
_log.warning('programmed to ignore conversion problems, hoping receiver can handle [%s]', filename)
return filename
#--------------------------------------------------------
def update_data_from_file(self, filename=None):
if not (os.access(filename, os.R_OK) and os.path.isfile(filename)):
......@@ -127,6 +128,7 @@ class cKeywordExpansion(gmBusinessDBObject.cBusinessDBObject):
__keyword_expansions = None
return True
#--------------------------------------------------------
def format(self):
txt = u'%s #%s\n' % (
......
......@@ -44,6 +44,15 @@ LOINC_rr_quantity = ['8478-0', '8448-3', '8449-1', '8456-6', '8457-4', '8458-2',
LOINC_heart_rate_quantity = ['8867-4', '67129-7', '40443-4', '69000-8', '69001-6', '68999-2']
LOINC_inr_quantity = ['34714-6', '46418-0', '6301-6', '38875-1']
#============================================================
def loinc2data(loinc):
cmd = u'SELECT * FROM ref.loinc WHERE code = %(loinc)s'
args = {'loinc': loinc}
rows, idx = gmPG2.run_ro_queries(queries = [{'cmd': cmd, 'args': args}], get_col_idx = False)
if len(rows) == 0:
return []
return rows[0]
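# Hedged usage sketch -- needs a reachable GNUmed database with the LOINC
# reference data imported; '8867-4' is one of the heart rate codes listed above:
#
#   row = loinc2data(u'8867-4')
#   if row == []:
#       print 'code not found (or ref.loinc not populated)'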
#============================================================
def loinc2term(loinc=None):
......@@ -83,6 +92,7 @@ SELECT coalesce (
return []
return [ r[0] for r in rows ]
#============================================================
def split_LOINCDBTXT(input_fname=None, data_fname=None, license_fname=None):
......@@ -404,9 +414,10 @@ _SQL_LOINC_from_any_coded_term = u"""
term %(fragment_condition)s)
"""
#------------------------------------------------------------
class cLOINCMatchProvider(gmMatchProvider.cMatchProvider_SQL2):
_pattern = regex.compile(r'^\D+\s+\D+$', regex.UNICODE | regex.LOCALE)
_pattern = regex.compile(r'^\D+\s+\D+$', regex.UNICODE)
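# e.g. u'heart rate' matches (no digits, whitespace in between),
# u'8867-4' does not (starts with a digit)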
_normal_query = u"""
SELECT DISTINCT ON (list_label)
......@@ -425,12 +436,14 @@ class cLOINCMatchProvider(gmMatchProvider.cMatchProvider_SQL2):
# %
# _SQL_LOINC_from_i18n_coded_term,
# _SQL_LOINC_from_en_EN_coded_term,
#--------------------------------------------------------
def getMatchesByPhrase(self, aFragment):
"""Return matches for aFragment at start of phrases."""
self._queries = [cLOINCMatchProvider._normal_query + u'\nORDER BY list_label\nLIMIT 75']
return gmMatchProvider.cMatchProvider_SQL2.getMatchesByPhrase(self, aFragment)
#--------------------------------------------------------
def getMatchesByWord(self, aFragment):
"""Return matches for aFragment at start of words inside phrases."""
......@@ -446,6 +459,7 @@ class cLOINCMatchProvider(gmMatchProvider.cMatchProvider_SQL2):
self._queries = [cLOINCMatchProvider._normal_query + u'\nORDER BY list_label\nLIMIT 75']
return gmMatchProvider.cMatchProvider_SQL2.getMatchesByWord(self, aFragment)
#--------------------------------------------------------
def getMatchesBySubstr(self, aFragment):
"""Return matches for aFragment as a true substring."""
......@@ -461,6 +475,7 @@ class cLOINCMatchProvider(gmMatchProvider.cMatchProvider_SQL2):
self._queries = [cLOINCMatchProvider._normal_query + u'\nORDER BY list_label\nLIMIT 75']
return gmMatchProvider.cMatchProvider_SQL2.getMatchesBySubstr(self, aFragment)
#============================================================
# main
#------------------------------------------------------------
......