@article {1313, title = {Open-source electronic health record systems: A systematic review of most recent advances.}, journal = {Health Informatics J}, volume = {28}, year = {2022}, month = {2022 Apr-Jun}, pages = {14604582221099828}, abstract = {

Open-source Electronic Health Records (OS-EHRs) are of pivotal importance in the management, operations, and administration of any healthcare organization. With the advancement of health informatics, researchers and healthcare practitioners have proposed various frameworks to assess the maturity of open-source EHRs. The significance of OS-EHRs stems from the fact that vendor-based EHR implementations are becoming financially burdensome, with single contracts exceeding $1 billion. Conversely, the adoption of OS-EHRs suffers from a lack of systematic evaluation against a standard reference model. To this end, the Healthcare Information and Management Systems Society (HIMSS) has presented a strategic road map called EMR Adoption and Maturity (EMRAM). The HIMSS-EMRAM model proposes a stage-wise approach that is globally recognized and can be applied as a benchmark evaluation criterion for open-source EHRs. This paper offers an applied descriptive methodology covering frequently studied open-source EHRs that are currently operational worldwide or have the potential for adoption in healthcare settings. In addition, we present profiling (User Support, Developer{\textquoteright}s Support, Customization Support, Technical details, and Diagnostic help) of the studied OS-EHRs from the developer{\textquoteright}s and user{\textquoteright}s perspectives using updated standard metrics. We carried out a multi-aspect objective analysis of the studied systems covering EHR functions, software-based features, and implementation. This review portrays systematic aspects of electronic medical record standards for open-source software implementations. As observed in the literature, prevalent research and working prototypes lack systematic review against the HIMSS-EMRAM model and do not present evolving software features. After applying our assessment measures, the results indicate that OS-EHRs have yet to achieve standards-compliant implementation. The findings of this paper can be beneficial in the planning and implementation of OS-EHR projects in the future.

}, keywords = {electronic health records, Humans, Medical Informatics, Publications, Software}, issn = {1741-2811}, doi = {10.1177/14604582221099828}, author = {Shaikh, Mohsin and Vayani, Arshad Hm and Akram, Sabina and Qamar, Nafees} } @article {1301, title = {Use of cost-effective technologies for a routine clinical pathology laboratory}, journal = {Lab on a Chip}, year = {2021}, month = {2021}, pages = { - }, abstract = {Classically, the need for highly sophisticated and expensive instruments has been a major limiting factor for clinical pathology laboratories, especially in developing countries. With the aim of making clinical pathology more accessible, a wide variety of free or low-cost technologies have been developed worldwide in recent years. 3D printing and Arduino approaches can provide up to 94\% savings on hardware and instrumentation compared with commercial alternatives. The vast selection of Point-of-Care Tests (POCT) currently available also reduces the need for specific instruments or personnel, as they can be used almost anywhere and by anyone. Lastly, there are dozens of free and libre digital tools available in health informatics. This review provides an overview of the state of the art in cost-effective alternatives with applications in routine clinical pathology laboratories. In this context, a variety of technologies including 3D printing and Arduino, lateral flow assays, plasmonic biosensors, and microfluidics, as well as laboratory information systems, are discussed. This review aims to serve as an introduction to different technologies that can make clinical pathology more accessible and, therefore, contribute to achieving universal health coverage.}, isbn = {1473-0197}, url = {http://dx.doi.org/10.1039/D1LC00658D}, author = {Vazquez, Mercedes and Anfossi, L. and Ben-Yoav, Hader and Di{\'e}guez, Lorena and Karopka, Thomas and Della Ventura, Bartolomeo and Abalde-Cela, Sara and Minopoli, Antonio and Di Nardo, Fabio and Kumar Shukla, Vikas and Teixeira, Alexandra and Tvarijonaviciute, Asta and Franco, Lorena} } @article {1220, title = {CODE STROKE ALERT-Concept and Development of a Novel Open-Source Platform to Streamline Acute Stroke Management.}, journal = {Front Neurol}, volume = {10}, year = {2019}, month = {2019}, pages = {725}, abstract = {

Effective, time-critical intervention in acute stroke is crucial to reduce mortality and morbidity, but delivery of reperfusion treatments is often hampered by pre-, in-, or inter-hospital system-level delays. Disjointed, repetitive, and inefficient communication is a consistent contributor to avoidable treatment delay. In the era of rapid reperfusion therapy for ischemic stroke, there is a need for a communication system to synchronize the flow of clinical information across the entire stroke journey. A multi-disciplinary development team designed an electronic communications platform, integrated across web browsers and a mobile application, to link all relevant members of the stroke treatment pathway. The platform uses tiered notifications and geotagging, incorporates multiple clinical score calculators, and complies with security regulations. The system safely saves relevant information for audit and research. Code Stroke Alert is a platform that can be accessed by emergency medical services (EMS) and hospital staff, coordinating the flow of information during acute stroke care and reducing duplication and error in clinical information handover. Electronic data logs provide an auditable trail of relevant quality improvement metrics, facilitating quality improvement and research. Code Stroke Alert will be freely available to health networks globally. The open-source nature of the software offers valuable potential for future development of plug-ins and add-ons based on individual institutional needs. Prospective multi-site implementation and measurement of clinical impact are underway.

}, issn = {1664-2295}, doi = {10.3389/fneur.2019.00725}, author = {Seah, Huey Ming and Burney, Moe and Phan, Michael and Shell, Daniel and Wu, Jamin and Zhou, Kevin and Brooks, Owen and Coulton, Bronwyn and Maingard, Julian and Tang, Jennifer and Yazdabadi, Gohar and Tahayori, Bahman and Barras, Christen and Kok, Hong Kuan and Chandra, Ronil and Thijs, Vincent and Brooks, Duncan Mark and Asadi, Hamed} } @article {1230, title = {Data Migration from Operating EMRs to OpenEMR with Mirth Connect.}, journal = {Stud Health Technol Inform}, volume = {257}, year = {2019}, month = {2019}, pages = {288-292}, abstract = {

Electronic medical records (EMR) are integral to day-to-day operations in a clinic. EMRs perform functions like scheduling or hosting medical records used by physicians and other staff [1]. A time comes when it is necessary to upgrade or change EMRs to maintain efficiency in a clinic. The most arduous part of changing a clinic{\textquoteright}s EMR is migrating the clinical data from the old EMR to the new one. This paper explores the feasibility of data migration between two Electronic Medical Records using open-source technologies. This enables smaller clinics to change EMRs when the need arises without incurring huge costs. Using Mirth Connect as a data integration engine and OpenEMR as the new EMR, we successfully migrated data from our old EMR to OpenEMR.
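Mirth Connect channels typically express this kind of field mapping as JavaScript transformer steps; purely as an illustration of the mapping idea (not the channel code used in this migration), a minimal Python sketch of pulling patient demographics out of a standard HL7 v2 PID segment might look like this, with the destination column names being hypothetical:

    # Hedged sketch: parse a pipe-delimited HL7 v2 PID segment into a flat record
    # that a destination EMR's patient table could accept.
    def parse_pid(segment: str) -> dict:
        fields = segment.split("|")
        family, _, given = fields[5].partition("^")  # PID-5: Family^Given
        return {
            "external_id": fields[3],   # PID-3: patient identifier
            "last_name": family,
            "first_name": given,
            "dob": fields[7],           # PID-7: date of birth (YYYYMMDD)
            "sex": fields[8],           # PID-8: administrative sex
        }

    print(parse_pid("PID|1||12345||Doe^Jane||19800101|F"))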

}, keywords = {Ambulatory Care Facilities, electronic health records, Humans, Physicians, Software}, issn = {1879-8365}, author = {Lin, Jing and Ranslam, Kyle and Shi, Fang and Figurski, Mike and Liu, Zheng} } @article {1271, title = {The impact of PEPFAR transition on HIV service delivery at health facilities in Uganda.}, journal = {PLoS One}, volume = {14}, year = {2019}, month = {2019}, pages = {e0223426}, abstract = {

BACKGROUND: Since 2004, the President{\textquoteright}s Emergency Plan for AIDS Relief (PEPFAR) has played a large role in Uganda{\textquoteright}s HIV/AIDS response. To better target resources to high-burden regions and facilities, PEPFAR planned to withdraw from 29\% of previously supported health facilities in Uganda between 2015 and 2017.

METHODS: We conducted a cross-sectional survey of 226 PEPFAR-supported health facilities in Uganda in mid-2017. The survey gathered information on availability, perceived quality, and access to HIV services before and after transition. We compare responses from transitioned facilities with those maintained on PEPFAR, accounting for survey design. We also extracted data from DHIS2 for the period October 2013-December 2017 on the number of HIV tests and counseling (HTC), the number of patients on antiretroviral therapy (Current on ART), and retention on first-line ART (Retention) at 12 months. Using mixed-effects models, we compare trends in service volume around the transition period.

RESULTS: There were 206 facilities that reported transition and 20 that reported maintenance on PEPFAR. Some facilities reporting transition may have been in a gap between implementing partners. The median transition date was September 2016, nine months prior to the survey. Transition facilities were more likely to discontinue HIV outreach following transition (51.6\% vs. 1.4\%, p<0.001) and to report declines in HIV care access (43.5\% vs. 3.1\%, p<0.001) and quality (35.6\% vs. 0\%, p<0.001). However, transition facilities did not differ in their trends in HIV service volume relative to maintenance facilities.

CONCLUSIONS: Transition from PEPFAR resulted in facilities reporting worsening patient access and service quality for HIV care, but there is insufficient evidence to suggest negative impacts on volume of HIV services. Facility respondents{\textquoteright} perceptions about access and quality may be overly pessimistic, or they may signal forthcoming impacts. Unrelated to transition, declining retention on ART in Uganda is a cause for concern.

}, issn = {1932-6203}, doi = {10.1371/journal.pone.0223426}, author = {Wilhelm, Jess Alan and Qiu, Mary and Paina, Ligia and Colantuoni, Elizabeth and Mukuru, Moses and Ssengooba, Freddie and Bennett, Sara} } @article {1278, title = {Large care gaps in primary care management of asthma: a longitudinal practice audit.}, journal = {BMJ Open}, volume = {9}, year = {2019}, month = {2019 01 29}, pages = {e022506}, abstract = {

OBJECTIVES: Care gaps in asthma may be highly prevalent but are poorly characterised. We sought to prospectively measure adherence to key evidence-based adult asthma practices in primary care and to identify predictors of these behaviours.

DESIGN: One-year prospective cohort study employing an electronic chart audit.

SETTING: Three family health teams (two academic, one community-based) in Ontario, Canada.

PARTICIPANTS: 884 patients (72.1\% female; 46.0{\textpm}17.5 years old) (4199 total visits; 4.8{\textpm}4.8 visits/patient) assigned to 23 physicians (65\% female; practising for 10.0{\textpm}8.6 years).

MAIN OUTCOME MEASURES: The primary outcome was the proportion of visits during which practitioners assessed asthma control according to symptom-based criteria. Secondary outcomes included the proportion of: patients who had asthma control assessed at least once; visits during which a controller medication was initiated or escalated; and patients who received a written asthma action plan. Behavioural predictors were established a priori and tested in a multivariable model.

RESULTS: Primary outcome: Providers assessed asthma control in 4.9\% of visits and 15.4\% of patients. Factors influencing assessment included clinic site (p=0.019) and presenting symptom, with providers assessing control more often during visits for asthma symptoms (35.0\%) or any respiratory symptoms (18.8\%) relative to other visits (1.6\%) (p<0.01).

SECONDARY OUTCOMES: Providers escalated controller therapy in 3.3\% of visits and 15.4\% of patients. Factors influencing escalation included clinic site, presenting symptom, and prior objective asthma diagnosis. Escalation occurred more frequently during visits for asthma symptoms (21.0\%) or any respiratory symptoms (11.9\%) relative to other visits (1.5\%) (p<0.01) and in patients without a prior objective asthma diagnosis (3.5\%) relative to those with one (1.3\%) (p=0.025). No asthma action plans were delivered.

CONCLUSIONS: Major gaps in evidence-based asthma practice exist in primary care. Targeted knowledge translation interventions are required to address these gaps, and can be tailored by leveraging the identified behavioural predictors.

TRIAL REGISTRATION NUMBER: NCT01070095; Pre-results.

}, issn = {2044-6055}, doi = {10.1136/bmjopen-2018-022506}, author = {Price, Courtney and Agarwal, Gina and Chan, David and Goel, Sanjeev and Kaplan, Alan G and Boulet, Louis-Philippe and Mamdani, Muhammad M and Straus, Sharon E and Lebovic, Gerald and Gupta, Samir} } @article {1210, title = {Open Source Infrastructure for Health Care Data Integration and Machine Learning Analyses.}, journal = {JCO Clin Cancer Inform}, volume = {3}, year = {2019}, month = {2019 Aug}, pages = {1-16}, abstract = {

PURPOSE: We have created a cloud-based machine learning system (CLOBNET) that is an open-source, lean infrastructure for electronic health record (EHR) data integration and is capable of extract, transform, and load (ETL) processing. CLOBNET enables comprehensive analysis and visualization of structured EHR data. We demonstrate the utility of CLOBNET by predicting primary therapy outcomes of patients with high-grade serous ovarian cancer (HGSOC) on the basis of EHR data.

MATERIALS AND METHODS: CLOBNET is built using open-source software to make data preprocessing, analysis, and model training user friendly. The source code of CLOBNET is available in GitHub. The HGSOC data set was based on a prospective cohort of 208 patients with HGSOC who were treated at Turku University Hospital, Finland, from 2009 to 2019 for whom comprehensive clinical and EHR data were available.

RESULTS: We trained machine learning (ML) models using clinical data, including a dissemination score developed herein that quantifies the disease burden at the time of diagnosis, to identify patients with progressive disease (PD) or a complete response (CR) on the basis of RECIST (version 1.1). The best performance was achieved with a logistic regression model, which resulted in an area under the receiver operating characteristic curve (AUROC) of 0.86, with a specificity of 73\% and a sensitivity of 89\%, when classifying patients who experienced PD versus those with a CR.
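As a generic illustration of this evaluation setup (not CLOBNET{\textquoteright}s actual code; the features and labels below are synthetic stand-ins), a logistic regression scored by AUROC can be sketched as:

    import numpy as np
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import roc_auc_score
    from sklearn.model_selection import train_test_split

    # Synthetic stand-in for tabular clinical features (e.g. a dissemination score
    # plus laboratory values) and a PD-vs-CR label.
    rng = np.random.default_rng(0)
    X = rng.normal(size=(208, 10))
    y = rng.integers(0, 2, size=208)  # 1 = progressive disease, 0 = complete response

    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=0)
    model = LogisticRegression(max_iter=1000).fit(X_tr, y_tr)
    print("AUROC:", roc_auc_score(y_te, model.predict_proba(X_te)[:, 1]))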

CONCLUSION: We have developed an open-source computational infrastructure, CLOBNET, that enables effective and rapid analysis of EHR and other clinical data. Our results demonstrate that CLOBNET allows predictions to be made on the basis of EHR data to address clinically relevant questions.

}, issn = {2473-4276}, doi = {10.1200/CCI.18.00132}, author = {Isoviita, Veli-Matti and Salminen, Liina and Azar, Jimmy and Lehtonen, Rainer and Roering, Pia and Carp{\'e}n, Olli and Hietanen, Sakari and Gr{\'e}nman, Seija and Hynninen, Johanna and F{\"a}rkkil{\"a}, Anniina and Hautaniemi, Sampsa} } @article {1211, title = {Open source software security vulnerability detection based on dynamic behavior features.}, journal = {PLoS One}, volume = {14}, year = {2019}, month = {2019}, pages = {e0221530}, abstract = {

Open source software has been widely used in various industries due to its openness and flexibility, but it also brings potential security problems. Therefore, security analysis is required before using open source software. The current mainstream open source software vulnerability analysis techniques are based on source code and suffer from problems such as false positives, false negatives, and restatements. To address these problems, building on further study of behavior feature extraction and vulnerability detection techniques, a method that uses dynamic behavior features to detect open source software vulnerabilities is proposed. First, the relationship between open source software vulnerabilities and API call sequences is studied. Then, a behavioral-risk vulnerability database of open source software is proposed as a support for vulnerability detection. In addition, a CNN-IndRNN classification model is constructed by improving the Independently Recurrent Neural Network (IndRNN) algorithm and is applied to open source software security vulnerability detection. The experimental results verify the effectiveness of the proposed detection method based on dynamic behavior features.
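The paper{\textquoteright}s IndRNN variant is not a built-in layer in common deep learning frameworks, so the hedged PyTorch sketch below substitutes a GRU simply to show the general shape of a 1-D convolution followed by a recurrent layer over embedded API-call IDs; all dimensions are illustrative:

    import torch
    import torch.nn as nn

    class CnnRnnClassifier(nn.Module):
        """Toy CNN + recurrent classifier over sequences of API-call IDs."""
        def __init__(self, vocab_size=1000, embed_dim=64, conv_ch=128, hidden=64):
            super().__init__()
            self.embed = nn.Embedding(vocab_size, embed_dim)
            self.conv = nn.Conv1d(embed_dim, conv_ch, kernel_size=3, padding=1)
            self.rnn = nn.GRU(conv_ch, hidden, batch_first=True)
            self.fc = nn.Linear(hidden, 2)  # vulnerable vs. benign

        def forward(self, x):                              # x: (batch, seq_len) call IDs
            e = self.embed(x).transpose(1, 2)              # (batch, embed_dim, seq_len)
            c = torch.relu(self.conv(e)).transpose(1, 2)   # (batch, seq_len, conv_ch)
            _, h = self.rnn(c)                             # h: (1, batch, hidden)
            return self.fc(h.squeeze(0))                   # (batch, 2) class logits

    logits = CnnRnnClassifier()(torch.randint(0, 1000, (4, 50)))
    print(logits.shape)  # torch.Size([4, 2])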

}, issn = {1932-6203}, doi = {10.1371/journal.pone.0221530}, author = {Li, Yuancheng and Ma, Longqiang and Shen, Liang and Lv, Junfeng and Zhang, Pan} } @article {1228, title = {PatientExploreR: an extensible application for dynamic visualization of patient clinical history from Electronic Health Records in the OMOP Common Data Model.}, journal = {Bioinformatics}, year = {2019}, month = {2019 Jun 19}, abstract = {

MOTIVATION: Electronic Health Records (EHR) are quickly becoming omnipresent in healthcare, but interoperability issues and technical demands limit their use for biomedical and clinical research. Interactive and flexible software that interfaces directly with EHR data structured around a common data model could accelerate EHR-based research by making the data more accessible to researchers who lack computational expertise and/or domain knowledge.

RESULTS: We present PatientExploreR, an extensible application built on the R/Shiny framework that interfaces with a relational database of EHR data in the Observational Medical Outcomes Partnership Common Data Model (CDM) format. PatientExploreR produces patient-level interactive and dynamic reports and facilitates visualization of clinical data without any programming required. It allows researchers to easily construct and export patient cohorts from the EHR for analysis with other software. This application could enable easier exploration of patient-level data for physicians and researchers. PatientExploreR can incorporate EHR data from any institution that employs the CDM for users with approved access. The software code is free and open-source under the MIT license, enabling institutions to install and users to expand and modify the application for their own purposes.
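PatientExploreR itself is an R/Shiny application; the minimal Python sketch below merely illustrates the kind of per-patient query such a tool issues against OMOP CDM tables (the SQLite file path and person ID are hypothetical, and a populated condition_occurrence/concept pair is assumed):

    import sqlite3  # any DB-API driver would do; a local SQLite CDM extract is assumed

    SQL = """
    SELECT co.condition_start_date, c.concept_name
    FROM condition_occurrence AS co
    JOIN concept AS c ON c.concept_id = co.condition_concept_id
    WHERE co.person_id = ?
    ORDER BY co.condition_start_date
    """

    def condition_timeline(db_path: str, person_id: int):
        """Return one patient's condition history as (date, concept_name) rows."""
        with sqlite3.connect(db_path) as conn:
            return conn.execute(SQL, (person_id,)).fetchall()

    # Example call (hypothetical file and ID):
    # print(condition_timeline("omop_cdm.sqlite", 42))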

AVAILABILITY: PatientExploreR can be freely obtained from GitHub: https://github.com/BenGlicksberg/PatientExploreR. We provide instructions for how researchers with approved access to their institutional EHR can use this package. We also release an open sandbox server of synthesized patient data for users without EHR access to explore: http://patientexplorer.ucsf.edu.

SUPPLEMENTARY INFORMATION: Supplementary data are available at Bioinformatics online.

}, issn = {1367-4811}, doi = {10.1093/bioinformatics/btz409}, author = {Glicksberg, Benjamin S and Oskotsky, Boris and Thangaraj, Phyllis M and Giangreco, Nicholas and Badgeley, Marcus A and Johnson, Kipp W and Datta, Debajyoti and Rudrapatna, Vivek and Rappoport, Nadav and Shervey, Mark M and Miotto, Riccardo and Goldstein, Theodore C and Rutenberg, Eugenia and Frazier, Remi and Lee, Nelson and Israni, Sharat and Larsen, Rick and Percha, Bethany and Li, Li and Dudley, Joel T and Tatonetti, Nicholas P and Butte, Atul J} } @article {1269, title = {Quality of routine facility data for monitoring priority maternal and newborn indicators in DHIS2: A case study from Gombe State, Nigeria.}, journal = {PLoS One}, volume = {14}, year = {2019}, month = {2019}, pages = {e0211265}, abstract = {

INTRODUCTION: Routine health information systems are critical for monitoring service delivery. District Health Information System, version 2 (DHIS2) is an open source software platform used in more than 60 countries, on which global initiatives increasingly rely for such monitoring. We used facility-reported data in DHIS2 for Gombe State, north-eastern Nigeria, to present a case study of data quality for monitoring priority maternal and neonatal health indicators.

METHODS: For all health facilities in DHIS2 offering antenatal and postnatal care services (n = 497) and labor and delivery services (n = 486), we assessed the quality of data for July 2016-June 2017 according to the World Health Organization data quality review guidance. Using data from DHIS2 as well as external facility-level and population-level household surveys, we reviewed three data quality dimensions-completeness and timeliness, internal consistency, and external consistency-and considered the opportunities for improvement.
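As a hedged illustration of the completeness dimension only (the column names and toy records below are hypothetical, not the DHIS2 export schema), the calculation reduces to the share of expected facility-months for which any report exists:

    import pandas as pd

    # Toy monthly reporting log: one row per facility-month with a submitted report.
    reports = pd.DataFrame({
        "facility": ["A", "A", "B", "B", "B"],
        "month": ["2016-07", "2016-08", "2016-07", "2016-09", "2016-10"],
    })
    n_facilities, n_months = reports["facility"].nunique(), 12  # July 2016 - June 2017
    completeness = 100 * len(reports.drop_duplicates()) / (n_facilities * n_months)
    print(f"reporting completeness: {completeness:.1f} percent")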

RESULTS: Of 14 priority maternal and neonatal health indicators that could be tracked through facility-based data, 12 were included in Gombe{\textquoteright}s DHIS2. During July 2016-June 2017, facility-reported data in DHIS2 were incomplete at least 40\% of the time, under-reported 10\%-60\% of the events documented in facility registers, and showed inconsistencies over time, between related indicators, and with an external data source. The best quality data elements were those that aligned with Gombe{\textquoteright}s health program priorities, particularly older health programs, and those that reflected contact indicators rather than indicators related to the provision of commodities or content of care.

CONCLUSION: This case study from Gombe State, Nigeria, demonstrates the high potential for effective monitoring of maternal and neonatal health using DHIS2. However, coordinated action at multiple levels of the health system is needed to maximize reporting of existing data; rationalize data flow; routinize data quality review, feedback, and supervision; and ensure ongoing maintenance of DHIS2.

}, keywords = {Adolescent, Adult, Female, Guidelines as Topic, Health information systems, Humans, Infant Health, Infant, Newborn, Maternal Health, Middle Aged, Nigeria, Pregnancy, Quality Indicators, Health Care, Young Adult}, issn = {1932-6203}, doi = {10.1371/journal.pone.0211265}, author = {Bhattacharya, Antoinette Alas and Umar, Nasir and Audu, Ahmed and Felix, Habila and Allen, Elizabeth and Schellenberg, Joanna R M and Marchant, Tanya} } @article {1221, title = {RADAR-Base: Open Source Mobile Health Platform for Collecting, Monitoring, and Analyzing Data Using Sensors, Wearables, and Mobile Devices.}, journal = {JMIR Mhealth Uhealth}, volume = {7}, year = {2019}, month = {2019 08 01}, pages = {e11734}, abstract = {

BACKGROUND: With a wide range of use cases in both research and clinical domains, collecting continuous mobile health (mHealth) streaming data from multiple sources in a secure, highly scalable, and extensible platform is of high interest to the open source mHealth community. The European Union Innovative Medicines Initiative Remote Assessment of Disease and Relapse-Central Nervous System (RADAR-CNS) program is an exemplary project with the requirements to support the collection of high-resolution data at scale; as such, the Remote Assessment of Disease and Relapse (RADAR)-base platform is designed to meet these needs and additionally facilitate a new generation of mHealth projects in this nascent field.

OBJECTIVE: Wide-bandwidth networks, smartphone penetrance, and wearable sensors offer new possibilities for collecting near-real-time high-resolution datasets from large numbers of participants. The aim of this study was to build a platform that would cater for large-scale data collection for remote monitoring initiatives. Key criteria are scalability, extensibility, security, and privacy.

METHODS: RADAR-base is developed as a modular application; the backend is built on a backbone of the highly successful Confluent/Apache Kafka framework for streaming data. To facilitate scaling and ease of deployment, we use Docker containers to package the components of the platform. RADAR-base provides 2 main mobile apps for data collection, a Passive App and an Active App. Other third-party apps and sensors are easily integrated into the platform. Management user interfaces to support data collection and enrolment are also provided.
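RADAR-base itself streams Avro-serialized records through a schema registry; as a loose, hedged sketch of the underlying idea (the topic name and payload are illustrative only), publishing one wearable sample to Kafka from Python could look like:

    import json
    from kafka import KafkaProducer  # kafka-python client

    producer = KafkaProducer(
        bootstrap_servers="localhost:9092",
        value_serializer=lambda v: json.dumps(v).encode("utf-8"),
    )
    # Illustrative accelerometer sample; real RADAR-base topics use Avro schemas.
    producer.send("phone_acceleration", {"userId": "p001", "time": 1565000000.0,
                                         "x": 0.01, "y": -0.02, "z": 0.98})
    producer.flush()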

RESULTS: General principles of the platform components and design of RADAR-base are presented here, with examples of the types of data currently being collected from devices used in RADAR-CNS projects: Multiple Sclerosis, Epilepsy, and Depression cohorts.

CONCLUSIONS: RADAR-base is a fully functional, remote data collection platform built around Confluent/Apache Kafka and provides off-the-shelf components for projects interested in collecting mHealth datasets at scale.

}, issn = {2291-5222}, doi = {10.2196/11734}, author = {Ranjan, Yatharth and Rashid, Zulqarnain and Stewart, Callum and Conde, Pauline and Begale, Mark and Verbeeck, Denny and Boettcher, Sebastian and Dobson, Richard and Folarin, Amos} } @article {1234, title = {Secondary Data Use in Rwanda: Leveraging OpenMRS for Global HIV Research.}, journal = {Stud Health Technol Inform}, volume = {264}, year = {2019}, month = {2019 Aug 21}, pages = {1732}, abstract = {

The Rwandan Ministry of Health supports a countrywide installation of the Open Medical Record System (OpenMRS) to improve clinical recordkeeping and patient care. However, electronic medical records also can be a valuable source of data for observational and experimental studies. We describe the challenges and lessons learned when reusing OpenMRS data in Rwanda for global HIV epidemiology research.

}, keywords = {Biomedical Research, electronic health records, Epidemiologic Studies, HIV, Humans, Rwanda}, issn = {1879-8365}, doi = {10.3233/SHTI190620}, author = {Muhoza, Benjamin and Remera, Eric and Shi, Qiuhu and Kabahizi, Jules and Brazier, Ellen and Sinayobye, Jean d{\textquoteright}Amour and Duda, Stephany N} } @article {1217, title = {STEM: An Open Source Tool for Disease Modeling.}, journal = {Health Secur}, volume = {17}, year = {2019}, month = {2019 Jul/Aug}, pages = {291-306}, abstract = {

The Spatiotemporal Epidemiologic Modeler (STEM) is an open source software project supported by the Eclipse Foundation and used by a global community of researchers and public health officials working to track and, when possible, control outbreaks of infectious disease in human and animal populations. STEM is not a model or a tool designed for a specific disease; it is a flexible, modular framework supporting exchange and integration of community models, reusable plug-in components, and denominator data, available to researchers worldwide at www.eclipse.org/stem. A review of multiple projects illustrates its capabilities. STEM has been used to study variations in transmission of seasonal influenza in Israel by strains; evaluate social distancing measures taken to curb the H1N1 epidemic in Mexico City; study measles outbreaks in part of London and inform local policy on immunization; and gain insights into H7N9 avian influenza transmission in China. A multistrain dengue fever model explored the roles of the mosquito vector, cross-strain immunity, and antibody response in the frequency of dengue outbreaks. STEM has also been used to study the impact of variations in climate on malaria incidence. During the Ebola epidemic, a weekly conference call supported the global modeling community; subsequent work modeled the impact of behavioral change and tested disease reintroduction via animal reservoirs. Work in Germany tracked salmonella in pork from farm to fork; and a recent doctoral dissertation used the air travel feature to compare the potential threats posed by weaponizing infectious diseases. Current projects include work in Great Britain to evaluate control strategies for parasitic disease in sheep, and in Germany and Hungary, to validate the model and inform policy decisions for African swine fever. STEM Version 4.0.0, released in early 2019, includes tools used in these projects and updates technical aspects of the framework to ease its use and re-use.
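STEM itself is a Java/Eclipse framework that composes far richer, spatially explicit models; purely as a worked example of the compartmental model class it builds on, a minimal SIR system in Python is:

    import numpy as np
    from scipy.integrate import odeint

    # Classic SIR model: dS/dt = -beta*S*I, dI/dt = beta*S*I - gamma*I, dR/dt = gamma*I
    def sir(y, t, beta, gamma):
        s, i, r = y
        return [-beta * s * i, beta * s * i - gamma * i, gamma * i]

    t = np.linspace(0, 160, 161)  # days
    s, i, r = odeint(sir, [0.999, 0.001, 0.0], t, args=(0.3, 0.1)).T
    print(f"peak infected fraction: {i.max():.3f} on day {t[i.argmax()]:.0f}")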

}, issn = {2326-5108}, doi = {10.1089/hs.2019.0018}, author = {Douglas, Judith V and Bianco, Simone and Edlund, Stefan and Engelhardt, Tekla and Filter, Matthias and G{\"u}nther, Taras and Hu, Kun Maggie and Nixon, Emily J and Sevilla, Nereyda L and Swaid, Ahmad and Kaufman, James H} } @article {1232, title = {Using HL7 FHIR to achieve interoperability in patient health record.}, journal = {J Biomed Inform}, volume = {94}, year = {2019}, month = {2019 Jun}, pages = {103188}, abstract = {

The rapid growth and acceptance of Electronic Health Records (EHRs) and standards to exchange EHRs have improved various aspects of health practices and patient care. However, the data captured in an EHR are accessible only to the providers and specialists within an organization, not to the patient. The concept of a Personal Health Record (PHR) is to allow patients to record and manage their health data beyond the EHR and, where possible, to view the EHR data in the PHR. Experts agree that bi-directional communication between the PHR and EHR makes the PHR a more effective and valuable tool for both providers and patients. Communicating near-real-time patient-recorded PHR data to an EHR allows providers to make appropriate clinical decisions, and patients can see any changes to their diagnostics or treatment plans. This research explores and critically analyzes HL7 FHIR to design and prototype an interoperable mobile PHR that conforms to the HL7 PHR Functional Model and allows bi-directional communication with OpenEMR.
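As a hedged sketch of the bi-directional exchange described here (the FHIR base URL and patient reference are placeholders, not the prototype{\textquoteright}s actual endpoints), pushing one patient-recorded vital sign to a FHIR R4 server could look like:

    import requests

    observation = {
        "resourceType": "Observation",
        "status": "final",
        "code": {"coding": [{"system": "http://loinc.org",
                             "code": "29463-7", "display": "Body weight"}]},
        "subject": {"reference": "Patient/example"},
        "valueQuantity": {"value": 72.5, "unit": "kg",
                          "system": "http://unitsofmeasure.org", "code": "kg"},
    }
    resp = requests.post("https://fhir.example.org/baseR4/Observation",
                         json=observation,
                         headers={"Content-Type": "application/fhir+json"})
    print(resp.status_code)  # 201 Created on success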

}, issn = {1532-0480}, doi = {10.1016/j.jbi.2019.103188}, author = {Saripalle, Rishi and Runyan, Christopher and Russell, Mitchell} } @article {1268, title = {Census-independent population mapping in northern Nigeria.}, journal = {Remote Sens Environ}, volume = {204}, year = {2018}, month = {2018 Jan}, pages = {786-798}, abstract = {

Although remote sensing has long been used to aid in the estimation of population, it has usually been in the context of spatial disaggregation of national census data, with the census counts serving both as observational data for specifying models and as constraints on model outputs. Here we present a framework for estimating populations from the bottom up, entirely independently of national census data, a critical need in areas without recent and reliable census data. To make observations of population density, we replace national census data with a microcensus, in which we enumerate population for a sample of small areas within the states of Kano and Kaduna in northern Nigeria. Using supervised texture-based classifiers with very high resolution satellite imagery, we produce a binary map of human settlement at 8-meter resolution across the two states and then a more refined classification consisting of 7 residential types and 1 non-residential type. Using the residential types and a model linking them to the population density observations, we produce population estimates across the two states in a gridded raster format, at approximately 90-meter resolution. We also demonstrate a simulation framework for capturing uncertainty and presenting estimates as prediction intervals for any region of interest of any size and composition within the study region. Used in concert with previously published demographic estimates, our population estimates allowed for predictions of the population under 5 in ten administrative wards that fit strongly with reference data collected during polio vaccination campaigns.
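The published pipeline applies supervised texture-based classifiers to very-high-resolution imagery; the hedged sketch below only shows the general pattern of fitting a classifier to precomputed per-pixel texture features, with synthetic data standing in for the real features and labels:

    import numpy as np
    from sklearn.ensemble import RandomForestClassifier

    rng = np.random.default_rng(1)
    X = rng.normal(size=(5000, 6))                  # stand-ins for GLCM-style texture metrics
    y = (X[:, 0] + 0.5 * X[:, 1] > 0).astype(int)   # toy settlement / non-settlement labels

    clf = RandomForestClassifier(n_estimators=200, random_state=1).fit(X, y)
    print("training accuracy:", clf.score(X, y))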

}, issn = {0034-4257}, doi = {10.1016/j.rse.2017.09.024}, author = {Weber, Eric M and Seaman, Vincent Y and Stewart, Robert N and Bird, Tomas J and Tatem, Andrew J and McKee, Jacob J and Bhaduri, Budhendra L and Moehl, Jessica J and Reith, Andrew E} } @article {1272, title = {Data for decision making: using a dashboard to strengthen routine immunisation in Nigeria.}, journal = {BMJ Glob Health}, volume = {3}, year = {2018}, month = {2018}, pages = {e000807}, abstract = {

Availability of reliable data has for a long time been a challenge for health programmes in Nigeria. Routine immunisation (RI) data have always been characterised by conflicting coverage figures for the same vaccine across different routine data reporting platforms. Following the adoption of District Health Information System version 2 (DHIS2) as a national electronic data management platform, the DHIS2 RI Dashboard Project was initiated to address the absence of some RI-specific indicators on DHIS2. The project was also intended to improve visibility and monitoring of RI indicators as well as strengthen the broader national health management information system by promoting the use of routine data for decision making at all governance levels. This paper documents the process, challenges and lessons learnt in implementing the project in Nigeria. A multistakeholder technical working group developed an implementation framework with clear preimplementation, implementation, and postimplementation activities. Beginning with a pilot in Kano state in 2014, the project has been scaled up countrywide. Nearly 34 000 health workers at all administrative levels were trained on RI data tools and DHIS2 use. The project contributed to the improvement in completeness of reports on DHIS2 from 53\% in the first quarter of 2014 to 81\% in the second quarter of 2017. The project faced challenges relating to primary healthcare governance structures at the subnational level, infrastructure, and human resource capacity. Our experience highlights the need for early and sustained advocacy to stakeholders in a decentralised health system to promote ownership and sustainability of a centrally coordinated systems strengthening initiative.

}, issn = {2059-7908}, doi = {10.1136/bmjgh-2018-000807}, author = {Etamesor, Sulaiman and Ottih, Chibuzo and Salihu, Ismail Ndalami and Okpani, Arnold Ikedichi} } @article {1293, title = {Designing mHealth for maternity services in primary health facilities in a low-income setting - lessons from a partially successful implementation.}, journal = {BMC Med Inform Decis Mak}, volume = {18}, year = {2018}, month = {2018 11 12}, pages = {96}, abstract = {

BACKGROUND: Increasing mobile phone ownership, functionality, and access to mobile broadband internet services have triggered growing interest in harnessing the potential of mobile phone technology to improve health services in low-income settings. The present project aimed at designing an mHealth system that assists midlevel health workers to provide better maternal health care services by automating the data collection and decision-making process. This paper describes the development process and technical aspects of the system considered critical for possible replication. It also highlights key lessons learned and challenges during implementation.

METHODS: The mHealth system had front-end and back-end components. The front-end component was implemented as a mobile based application while the back-end component was implemented as a web-based application that ran on a central server for data aggregation and report generation. The current mHealth system had four applications; namely, data collection/reporting, electronic health records, decision support, and provider education along the continuum of care including antenatal, delivery and postnatal care. The system was pilot-tested and deployed in selected health centers of North Shewa Zone, Amhara region, Ethiopia.

RESULTS: The system was used in 5 health centers from January 2014 and later expanded to an additional 10 health centers in June 2016, with a total of 5927 electronic forms submitted to the back-end system. The submissions through the mHealth system were slightly lower compared to the actual number of clients who visited those facilities, as verified by record reviews. Regarding timeliness, only 11\% of the electronic forms were submitted on the day of the client visit, while an additional 17\% of the forms were submitted within 10~days of the client{\textquoteright}s visit. On average, forms were submitted 39~days after the day of the client{\textquoteright}s visit, with a range of 0 to 150~days.

CONCLUSIONS: The study illustrated that an effective mHealth intervention can be developed using an open source platform and local resources. The system impacted key health outcomes and contributed to timely and complete data submission. Lessons learned through the process, including success factors and challenges, are discussed.

}, keywords = {Cell Phone, Delivery of Health Care, electronic health records, Ethiopia, Female, Health Facilities, Humans, Maternal Health Services, Mobile Applications, Poverty, Pregnancy, Telemedicine}, issn = {1472-6947}, doi = {10.1186/s12911-018-0704-9}, author = {Shiferaw, Solomon and Workneh, Andualem and Yirgu, Robel and Dinant, Geert-Jan and Spigt, Mark} } @article {1282, title = {Effect of glycosylated hemoglobin on response to ranibizumab therapy in diabetic macular edema: real-world outcomes in 312 patients.}, journal = {Can J Ophthalmol}, volume = {53}, year = {2018}, month = {2018 08}, pages = {415-419}, abstract = {

OBJECTIVE: To investigate the effect of serum glycosylated hemoglobin (HbA1c) on the outcomes of ranibizumab therapy for diabetic macular edema (DME).

DESIGN: Retrospective cohort study.

PARTICIPANTS: Patients receiving ranibizumab injections for centre-involving DME in a National Health Service setting.

METHODS: The Moorfields OpenEyes database was used to study eyes with DME treated with ranibizumab from October 2013 to November 2015 at the Moorfields City Road, Ealing, Northwick Park, and St George{\textquoteright}s Hospital sites. Only eyes receiving a minimum of 3 injections and completing 12 months of follow-up were included. If both eyes received treatment, the first eye treated was analyzed. When both eyes received initial treatment simultaneously, random number tables were used to select the eye for analysis. HbA1c was tested at the initiation of ranibizumab treatment. Multivariate regression analysis was used to identify relationships between HbA1c and the outcome measures.

OUTCOMES: The primary outcome was change in visual acuity (VA) in Early Treatment Diabetic Retinopathy Study (ETDRS) letters. The secondary outcomes were change in central subfield thickness (CSFT) and macular volume (MV), as well as the number of injections in year 1.

RESULTS: Three hundred and twelve eyes of 312 patients were included in the analysis. HbA1c was not related to change in VA (p = 0.577), change in CSFT (p = 0.099), change in MV (p = 0.082), or number of injections in year 1 (p = 0.859).

CONCLUSIONS: HbA1c is not related to functional or anatomical outcomes at 1 year in DME treated with ranibizumab.

}, keywords = {Aged, Angiogenesis Inhibitors, Biomarkers, Diabetic Retinopathy, Female, Follow-Up Studies, Glycated Hemoglobin A, Humans, Intravitreal Injections, Macula Lutea, Macular Edema, Male, Middle Aged, Ranibizumab, Retrospective Studies, Tomography, Optical Coherence, Treatment Outcome, Vascular Endothelial Growth Factor A, Visual Acuity}, issn = {1715-3360}, doi = {10.1016/j.jcjo.2017.10.008}, author = {Shalchi, Zaid and Okada, Mali and Bruynseels, Alice and Palethorpe, David and Yusuf, Ammar and Hussain, Rohan and Herrspiegel, Christina and Scazzarriello, Antonio and Habib, Abubakar and Amin, Razia and Rajendram, Ranjan} } @article {1290, title = {mLearning in the Democratic Republic of the Congo: A Mixed-Methods Feasibility and Pilot Cluster Randomized Trial Using the Safe Delivery App.}, journal = {Glob Health Sci Pract}, volume = {6}, year = {2018}, month = {2018 12 27}, pages = {693-710}, abstract = {

BACKGROUND: Substandard delivery care has been widely documented as a major cause of maternal mortality in health facilities globally. Health worker learning via mobile devices is increasing rapidly; however, there is little evidence of mLearning effectiveness. This study sought to determine the feasibility, acceptability, and potential effect of the Safe Delivery App (SDA) on health workers{\textquoteright} practices in basic emergency obstetric and newborn care (BEmONC) in the Democratic Republic of the Congo (DRC). The Theoretical Domains Framework was used to guide this research.

METHODS: Eight BEmONC facilities in central DRC were randomized to either an mLearning intervention or to standard practice (control). Maternal and newborn health workers in intervention facilities (n=64) were trained on the use of smartphones and the French version of the SDA. The SDA is an evidence-based BEmONC training resource with visual guidance using animated videos and clinical management instructions developed by the Maternity Foundation and the Universities of Copenhagen and Southern Denmark. Knowledge on postpartum hemorrhage (PPH) and neonatal resuscitation (NR) and self-confidence in performing 12 BEmONC procedures were assessed at baseline and at 3 months post-intervention. Eighteen qualitative interviews were conducted with app users and key stakeholders to assess feasibility and acceptability of mLearning and the use of the SDA. Maternal mortality was compared in intervention and control facilities using a smartphone-based Open Data Kit (ODK) data application. One smartphone with SDA and ODK was entrusted to intervention facilities for the study period, whereas control facilities received smartphones with ODK only.

RESULTS: The analysis included 62 health workers. Knowledge scores on postpartum hemorrhage and neonatal resuscitation increased significantly from baseline among intervention participants compared with controls at 3 months post-intervention (mean difference for PPH knowledge, 17.4 out of 100; 95\% confidence interval [CI]=10.7 to 24.0 and 19.4 for NR knowledge; 95\% CI=11.4 to 27.4), as did self-confidence scores on 12 essential BEmONC procedures (mean difference, 4.2 out of 48; CI=0.7 to 7.7). Increases were unaffected by health worker cadre and previous smartphone use. Qualitative interviews supported the feasibility and acceptability of the SDA and mLearning, and the potential for it to impact maternal and neonatal mortality in the DRC.

CONCLUSION: Use of the Safe Delivery App supported increased health worker knowledge and self-confidence in the management of obstetric and newborn emergencies after 3 months. SDA and mLearning were found to be feasible and acceptable to health workers and key stakeholders in the DRC.

}, keywords = {Congo, Feasibility Studies, Female, Humans, Infant Care, Infant, Newborn, Interviews as Topic, Male, Maternal Mortality, Mobile Applications, Obstetrics, Pilot Projects, qualitative research, Quality of Health Care}, issn = {2169-575X}, doi = {10.9745/GHSP-D-18-00275}, author = {Bolan, Nancy E and Sthreshley, Larry and Ngoy, Bernard and Ledy, Faustin and Ntayingi, Mano and Makasy, Davis and Mbuyi, Marie-Claude and Lowa, Gisele and Nemeth, Lynne and Newman, Susan} } @article {1235, title = {A Platform for Innovation and Standards Evaluation: a Case Study from the OpenMRS Open-Source Radiology Information System.}, journal = {J Digit Imaging}, volume = {31}, year = {2018}, month = {2018 06}, pages = {361-370}, abstract = {

Open-source development can provide a platform for innovation by seeking feedback from community members as well as providing tools and infrastructure to test new standards. Vendors of proprietary systems may delay adoption of new standards until there are sufficient drivers, such as legal mandates or financial incentives, to encourage or mandate adoption. Moreover, open-source systems in healthcare have been widely adopted in low- and middle-income countries and can be used to bridge gaps that exist in global health radiology. Since 2011, the authors, along with a community of open-source contributors, have worked on developing an open-source radiology information system (RIS) across two communities, OpenMRS and LibreHealth. The main purpose of the RIS is to implement core radiology workflows, on which others can build and test new radiology standards. This work has resulted in three major releases of the system, with current architectural changes driven by changing technology, development of new standards in health and imaging informatics, and changing user needs. At their core, both these communities are focused on building general-purpose EHR systems, but based on user contributions from the fringes, we have been able to create an innovative system that has been used by hospitals and clinics in four different countries. We provide an overview of the history of the LibreHealth RIS and the architecture of the system, summarize its standards integration, describe the challenges of developing an open-source product, and outline future directions. Our goal is to attract more participation and involvement to further develop the LibreHealth RIS into an Enterprise Imaging System that can be used in other clinical imaging domains, including pathology and dermatology.

}, keywords = {Diagnostic Imaging, Humans, Radiology Information Systems, Software, Systems Integration, Workflow}, issn = {1618-727X}, doi = {10.1007/s10278-018-0088-5}, author = {Gichoya, Judy W and Kohli, Marc and Ivange, Larry and Schmidt, Teri S and Purkayastha, Saptarshi} } @article {1224, title = {Towards Implementation of OMOP in a German University Hospital Consortium.}, journal = {Appl Clin Inform}, volume = {9}, year = {2018}, month = {2018 01}, pages = {54-61}, abstract = {

BACKGROUND: In 2015, the German Federal Ministry of Education and Research initiated a large data integration and data sharing research initiative to improve the reuse of data from patient care and translational research. The Observational Medical Outcomes Partnership (OMOP) common data model and the Observational Health Data Sciences and Informatics (OHDSI) tools could be used as a core element in this initiative for harmonizing the terminologies used as well as facilitating the federation of research analyses across institutions.

OBJECTIVE: To realize an OMOP/OHDSI-based pilot implementation within a consortium of eight German university hospitals, evaluate the applicability to support data harmonization and sharing among them, and identify potential enhancement requirements.

METHODS: The vocabularies and terminological mapping required for importing the fact data were prepared, and the process for importing the data from the source files was designed. For eight German university hospitals, a virtual machine preconfigured with the OMOP database and the OHDSI tools as well as the jobs to import the data and conduct the analysis was provided. Last, a federated/distributed query to test the approach was executed.

RESULTS: While the mapping of ICD-10 German Modification succeeded with a rate of 98.8\% of all terms for diagnoses, the procedures could not be mapped and hence an extension to the OMOP standard terminologies had to be made. Overall, the data of 3 million inpatients with approximately 26 million conditions, 21 million procedures, and 23 million observations have been imported. A federated query to identify a cohort of colorectal cancer patients was successfully executed and yielded 16,701 patient cases visualized in a Sunburst plot.
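As a hedged sketch of the kind of count each site contributes to such a federated query (real OHDSI cohort definitions rely on curated concept sets and concept_ancestor rather than the simple name match used here; the database path is hypothetical):

    import sqlite3  # stand-in driver; each site would run this against its own CDM instance

    SQL = """
    SELECT COUNT(DISTINCT co.person_id)
    FROM condition_occurrence AS co
    JOIN concept AS c ON c.concept_id = co.condition_concept_id
    WHERE c.concept_name LIKE '%colorectal%'
    """

    def cohort_size(db_path: str) -> int:
        with sqlite3.connect(db_path) as conn:
            return conn.execute(SQL).fetchone()[0]

    # Example call (hypothetical local CDM extract):
    # print(cohort_size("site_omop_cdm.sqlite"))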

CONCLUSION: OMOP/OHDSI is a viable open source solution for data integration in a German research consortium. Once the terminology problems can be solved, researchers can build on an active community for further development.

}, keywords = {Cooperative Behavior, Germany, Health Plan Implementation, Hospitals, University, Humans, Outcome Assessment (Health Care), Surveys and Questionnaires, Vocabulary}, issn = {1869-0327}, doi = {10.1055/s-0037-1617452}, author = {Maier, C and Lang, L and Storf, H and Vormstein, P and Bieber, R and Bernarding, J and Herrmann, T and Haverkamp, C and Horki, P and Laufer, J and Berger, F and H{\"o}ning, G and Fritsch, H W and Sch{\"u}ttler, J and Ganslandt, T and Prokosch, H U and Sedlmayr, M} } @article {1270, title = {Coverage of routine reporting on malaria parasitological testing in Kenya, 2015-2016.}, journal = {Glob Health Action}, volume = {10}, year = {2017}, month = {2017}, pages = {1413266}, abstract = {

BACKGROUND: Following the launch of District Health Information System 2 across facilities in Kenya, more health facilities are now capable of carrying out malaria parasitological testing and reporting data as part of routine health information systems, improving the potential value of routine data for accurate and timely tracking of rapidly changing disease epidemiology at fine spatial resolutions.

OBJECTIVES: This study evaluates the current coverage and completeness of reported malaria parasitological testing data in DHIS2, specifically looking at patterns in the geographic coverage of public health facilities in Kenya.

METHODS: Monthly facility level data on malaria parasitological testing were extracted from Kenya DHIS2 between November 2015 and October 2016. DHIS2 public facilities were matched to a geo-coded master facility list to obtain coordinates. Coverage was defined as the geographic distribution of facilities reporting any data by region. Completeness of reporting was defined as the percentage of facilities reporting any data for the whole 12-month period or for 3, 6 and 9 months.

RESULTS: Of the 10,090 facilities extracted, 5,933 (59\%) were public health facilities. Fifty-nine per cent of the public facilities did not report any data, while 36\%, 29\%, and 22\% of facilities reported data for at least 3, 6, and 9 months, respectively. Only 8\% of public facilities had data reported for every month. There were proportionately more hospitals (86\%) than health centres (76\%) and dispensaries/clinics (30\%) reporting. There were significant geographic variations in reporting rates. Counties along the malaria-endemic coast had the lowest reporting rate, with only 1\% of facilities reporting consistently for 12 months.

CONCLUSION: Current coverage and completeness of reporting of malaria parasitological diagnosis across Kenya{\textquoteright}s public health system remains poor. The usefulness of routine data to improve our understanding of sub-national heterogeneity across Kenya would require significant improvements to the consistency and coverage of data captured by DHIS2.

}, keywords = {Health Facilities, Health information systems, Humans, Kenya, Malaria, Mandatory Reporting, Public Health Surveillance}, issn = {1654-9880}, doi = {10.1080/16549716.2017.1413266}, author = {Maina, Joseph K and Macharia, Peter M and Ouma, Paul O and Snow, Robert W and Okiro, Emelda A} } @article {1238, title = {Design and implementation of an affordable, public sector electronic medical record in rural Nepal.}, journal = {J Innov Health Inform}, volume = {24}, year = {2017}, month = {2017 Jun 23}, pages = {862}, abstract = {

INTRODUCTION: Globally, electronic medical records are central to the infrastructure of modern healthcare systems. Yet the vast majority of electronic medical records have been designed for resource-rich environments and are not feasible in settings of poverty. Here we describe the design and implementation of an electronic medical record at a public sector district hospital in rural Nepal, and its subsequent expansion to an additional public sector facility. DEVELOPMENT: The electronic medical record was designed to address the following elements of public sector healthcare delivery: 1) integration of the systems across inpatient, surgical, outpatient, emergency, laboratory, radiology, and pharmacy sites of care; 2) effective data extraction for impact evaluation and government regulation; 3) optimization for longitudinal care provision and patient tracking; and 4) effectiveness for quality improvement initiatives.

APPLICATION: For these purposes, we adapted Bahmni, a product built with open-source components for patient tracking, clinical protocols, pharmacy, laboratory, imaging, financial management, and supply logistics. In close partnership with government officials, we deployed the system in February of 2015, added additional functionality, and iteratively improved the system over the following year. This experience then enabled us to deploy the system at an additional district-level hospital in a different part of the country in under four weeks. We discuss the implementation challenges and the strategies we pursued to build an electronic medical record for the public sector in rural Nepal. DISCUSSION: Over the course of 18 months, we were able to develop, deploy and iterate upon the electronic medical record, and then deploy the refined product at an additional facility within only four weeks. Our experience suggests the feasibility of an integrated electronic medical record for public sector care delivery even in settings of rural poverty.

}, keywords = {Delivery of Health Care, Integrated, electronic health records, Global Health, Health Plan Implementation, Hospitals, Public, Humans, Nepal, Public Sector, Rural Population}, issn = {2058-4563}, doi = {10.14236/jhi.v24i2.862}, author = {Raut, Anant and Yarbrough, Chase and Singh, Vivek and Gauchan, Bikash and Citrin, David and Verma, Varun and Hawley, Jessica and Schwarz, Dan and Harsha Bangura, Alex and Shrestha, Biplav and Schwarz, Ryan and Adhikari, Mukesh and Maru, Duncan} } @article {1236, title = {Development and Deployment of the OpenMRS-Ebola Electronic Health Record System for an Ebola Treatment Center in Sierra Leone.}, journal = {J Med Internet Res}, volume = {19}, year = {2017}, month = {2017 08 21}, pages = {e294}, abstract = {

BACKGROUND: Stringent infection control requirements at Ebola treatment centers (ETCs), which are specialized facilities for isolating and treating Ebola patients, create substantial challenges for recording and reviewing patient information. During the 2014-2016 West African Ebola epidemic, paper-based data collection systems at ETCs compromised the quality, quantity, and confidentiality of patient data. Electronic health record (EHR) systems have the potential to address such problems, with benefits for patient care, surveillance, and research. However, no suitable software was available for deployment when large-scale ETCs opened as the epidemic escalated in 2014.

OBJECTIVE: We present our work on rapidly developing and deploying OpenMRS-Ebola, an EHR system for the Kerry Town ETC in Sierra Leone. We describe our experience, lessons learned, and recommendations for future health emergencies.

METHODS: We used the OpenMRS platform and Agile software development approaches to build OpenMRS-Ebola. Key features of our work included daily communications between the development team and ground-based operations team, iterative processes, and phased development and implementation. We made design decisions based on the restrictions of the ETC environment and regular user feedback. To evaluate the system, we conducted predeployment user questionnaires and compared the EHR records with duplicate paper records.

RESULTS: We successfully built OpenMRS-Ebola, a modular stand-alone EHR system with a tablet-based application for infectious patient wards and a desktop-based application for noninfectious areas. OpenMRS-Ebola supports patient tracking (registration, bed allocation, and discharge); recording of vital signs and symptoms; medication and intravenous fluid ordering and monitoring; laboratory results; clinician notes; and data export. It displays relevant patient information to clinicians in infectious and noninfectious zones. We implemented phase 1 (patient tracking; drug ordering and monitoring) after 2.5 months of full-time development. OpenMRS-Ebola was used for 112 patient registrations, 569 prescription orders, and 971 medication administration recordings. We were unable to fully implement phases 2 and 3 as the ETC closed because of a decrease in new Ebola cases. The phase 1 evaluation suggested that OpenMRS-Ebola worked well in the context of the rollout, and the user feedback was positive.

CONCLUSIONS: To our knowledge, OpenMRS-Ebola is the most comprehensive adaptable clinical EHR built for a low-resource setting health emergency. It is designed to address the main challenges of data collection in highly infectious environments that require robust infection prevention and control measures and it is interoperable with other electronic health systems. Although we built and deployed OpenMRS-Ebola more rapidly than typical software, our work highlights the challenges of having to develop an appropriate system during an emergency rather than being able to rapidly adapt an existing one. Lessons learned from this and previous emergencies should be used to ensure that a set of well-designed, easy-to-use, pretested health software is ready for quick deployment in future.

}, keywords = {electronic health records, Epidemics, Hemorrhagic Fever, Ebola, Humans, Infection Control, Sierra Leone, Telemedicine}, issn = {1438-8871}, doi = {10.2196/jmir.7881}, author = {Oza, Shefali and Jazayeri, Darius and Teich, Jonathan M and Ball, Ellen and Nankubuge, Patricia Alexandra and Rwebembera, Job and Wing, Kevin and Sesay, Alieu Amara and Kanter, Andrew S and Ramos, Glauber D and Walton, David and Cummings, Rachael and Checchi, Francesco and Fraser, Hamish S} } @article {1285, title = {Long-Term Outcomes of Aflibercept Treatment for Neovascular Age-Related Macular Degeneration in a Clinical Setting.}, journal = {Am J Ophthalmol}, volume = {174}, year = {2017}, month = {2017 Feb}, pages = {160-168}, abstract = {

PURPOSE: To report 2-year treatment outcomes with intravitreal aflibercept for neovascular age-related macular degeneration (nAMD) in routine clinical practice.

DESIGN: Retrospective, nonrandomized, interventional case series.

METHODS: Retrospective analysis of electronic medical record (EMR) notes (OpenEyes) and paper case notes and review of spectral-domain optical coherence tomography (SDOCT) imaging of patients with consecutively treated eyes with previously untreated nAMD. Patients were commenced on aflibercept injections in 1 or both eyes from October 1, 2013 to December 31, 2013. Data including age, sex, visual acuity (VA) measured on Early Treatment Diabetic Retinopathy Study charts, injection episodes, and complications were recorded. Additionally, SDOCT data, including presence or absence of macular fluid and automated central subfield macular thickness (CSMT) at year 1 and 2, were recorded.

RESULTS: Of the 109 eyes of 102 patients treated, data from 94 eyes of 88 patients were available at 2-year follow-up (86\% of patients). In the analysis of 2-year outcomes, there were 58 women (65.9\%); the mean ({\textpm} standard deviation) age was 77.5 {\textpm} 8 years. Over the 2 years, these eyes received a median of 12 (mean, 11.4 {\textpm} 4) injections at a median of 100 (mean, 99.3 {\textpm} 5.3) weeks of follow-up. The mean VA changed from 55.9 {\textpm} 15 letters at baseline to 61.3 {\textpm} 16.9 letters (VA gain 5.4 letters) at 1 year and to 61 {\textpm} 17.1 letters (VA gain 5.1 {\textpm} 14.9 letters) at 2 years. The reduction in CSMT was 79~μm with absence of macular fluid in 72.7\% of the 88 eyes with SDOCT data available at 2-year follow-up.

CONCLUSIONS: The VA and SDOCT results compare favorably with outcomes seen in randomized controlled trials. The results suggest that good long-term outcomes can be achieved using aflibercept for nAMD in clinical settings.

}, keywords = {Aged, Dose-Response Relationship, Drug, Female, Fluorescein Angiography, Follow-Up Studies, Fundus Oculi, Humans, Intravitreal Injections, Macula Lutea, Macular Degeneration, Male, Receptors, Vascular Endothelial Growth Factor, Recombinant Fusion Proteins, Retinal Neovascularization, Retrospective Studies, Time Factors, Tomography, Optical Coherence, Treatment Outcome, Visual Acuity}, issn = {1879-1891}, doi = {10.1016/j.ajo.2016.09.038}, author = {Eleftheriadou, Maria and Vazquez-Alfageme, Clara and Citu, Cristina Maria and Crosby-Nwaobi, Roxanne and Sivaprasad, Sobha and Hykin, Philip and Hamilton, Robin D and Patel, Praveen J} } @article {1267, title = {Mapping for Health in Cameroon: Polio Legacy and Beyond.}, journal = {J Infect Dis}, volume = {216}, year = {2017}, month = {2017 07 01}, pages = {S337-S342}, abstract = {

During the poliovirus outbreak in Cameroon from October 2013 to April 2015, the Ministry of Public Health{\textquoteright}s Expanded Program on Immunization requested technical support to improve mapping of health district boundaries and health facility locations for more effective planning and analysis of polio program data. In December 2015, teams collected data on settlements, health facilities, and other features using smartphones. These data, combined with high-resolution satellite imagery, were used to create new health area and health district boundaries, providing the most accurate health sector administrative boundaries to date for Cameroon. The new maps are useful to and used by the polio program as well as other public health programs within Cameroon such as the District Health Information System and the Emergency Operations Center, demonstrating the value of the Global Polio Eradication Initiative{\textquoteright}s legacy.

}, keywords = {Cameroon, Geographic Information Systems, Humans, Immunization Programs, Poliomyelitis, Public Health, Public Health Surveillance, Smartphone}, issn = {1537-6613}, doi = {10.1093/infdis/jix008}, author = {Rosencrans, Louie C and Sume, Gerald E and Kouontchou, Jean-Christian and Voorman, Arend and Anokwa, Yaw and Fezeu, Maurice and Seaman, Vincent Y} } @article {1276, title = {A Mobile-Based Community Health Management Information System for Community Health Workers and Their Supervisors in 2 Districts of Zambia.}, journal = {Glob Health Sci Pract}, volume = {5}, year = {2017}, month = {2017 09 27}, pages = {486-494}, abstract = {

INTRODUCTION: Effective community health management information systems (C-HMIS) are important in low-resource countries that rely heavily on community-based health care providers. Zambia currently lacks a functioning C-HMIS to provide real-time, community-based health information from community health workers (CHWs) to health center staff and higher levels of the health system.

PROGRAM DESCRIPTION: We developed a C-HMIS mobile platform for use by CHWs providing integrated community case management (iCCM) services and their supervisors to address challenges of frequent stock-outs and inadequate supportive supervision of iCCM-trained CHWs. The platform used simple feature mobile phones on which were loaded the District Health Information System version 2 (DHIS2) software and Java 2 platform micro edition (J2ME) aggregation and tracker applications. This project was implemented in Chipata and Chadiza districts, which supported previous mHealth programs and had cellular coverage from all 3 major network carriers in Zambia. A total of 40 CHWs and 20 CHW supervisors received mobile phones with data bundles and training in the mobile application, after which they implemented the program over a period of 5.5 months, from February to mid-July 2016. CHWs used the mobile phones to submit data on iCCM cases seen, managed, and referred, as well as iCCM medical and diagnostic supplies received and dispensed. Using their mobile phones, the supervisors tracked CHWs{\textquoteright} reported cases with medicine consumption, sent CHWs feedback on their referrals, and received SMS reminders to set up mentorship sessions.

OBSERVATIONS: CHWs were able to use the mobile application to send weekly reports to health center supervisors on disease caseloads and medical commodities consumed, to make drug and supply requisitions, and to send pre-referral notices to health centers. Health center staff used the mobile system to provide feedback to CHWs on the case outcomes of referred patients and to receive automated monthly SMS reminders to invite CHWs to the facility for mentorship. District- and central-level staff were able to access community-level health data in real time using passwords.

LESSONS LEARNED: C-HMIS, using simple feature phones, was feasible and viable for the provision of real-time community-based health information to all levels of the health care system in Zambia, but smartphones, laptops, or desktop computers are needed to perform data analysis and visualization. Ongoing technical support is needed to address the hardware and software challenges CHWs face in their day-to-day interaction with the application on their mobile phones.

}, keywords = {Community Health Services, Community Health Workers, Delivery of Health Care, Health information systems, Humans, Mobile Applications, Referral and Consultation, Zambia}, issn = {2169-575X}, doi = {10.9745/GHSP-D-16-00275}, author = {Biemba, Godfrey and Chiluba, Boniface and Yeboah-Antwi, Kojo and Silavwe, Vichaels and Lunze, Karsten and Mwale, Rodgers K and Russpatrick, Scott and Hamer, Davidson H} } @article {1174, title = {A national standards-based assessment on functionality of electronic medical records systems used in {Kenyan} public-{Sector} health facilities}, journal = {International Journal of Medical Informatics}, volume = {97}, year = {2017}, pages = {68{\textendash}75}, abstract = {BACKGROUND: Variations in the functionality, content and form of electronic medical record systems (EMRs) challenge national roll-out of these systems as part of a national strategy to monitor HIV response. To enforce the EMRs minimum requirements for delivery of quality HIV services, the Kenya Ministry of Health (MoH) developed EMRs standards and guidelines. The standards guided the recommendation of EMRs that met a preset threshold for national roll-out. METHODS: Using a standards-based checklist, six review teams formed by the MoH EMRs Technical Working Group rated a total of 17 unique EMRs in 28 heath facilities selected by individual owners for their optimal EMR implementation. EMRs with an aggregate score of >=60\% against checklist criteria were identified by the MoH as suitable for upgrading and rollout to Kenyan public health facilities. RESULTS: In Kenya, existing EMRs scored highly in health information and reporting (mean score=71.8\%), followed by security, system features, core clinical information, and order entry criteria (mean score=58.1\%-55.9\%), and lowest against clinical decision support (mean score=17.6\%) and interoperability criteria (mean score=14.3\%). Four EMRs met the 60.0\% threshold: OpenMRS, IQ-Care, C-PAD and Funsoft. On the basis of the review, the MoH provided EMRs upgrade plans to owners of all the 17 systems reviewed. CONCLUSION: The standards-based review in Kenya represents an effort to determine level of conformance to the EMRs standards and prioritize EMRs for enhancement and rollout. The results support concentrated use of resources towards development of the four recommended EMRs. Further review should be conducted to determine the effect of the EMR-specific upgrade plans on the other 13 EMRs that participated in the review exercise.}, keywords = {Checklist, EMRs, Review, Standards}, issn = {1872-8243}, doi = {10.1016/j.ijmedinf.2016.09.013}, author = {Kang{\textquoteright}a, Samuel and Puttkammer, Nancy and Wanyee, Steven and Kimanga, Davies and Madrano, Jason and Muthee, Veronica and Odawo, Patrick and Sharma, Anjali and Oluoch, Tom and Robinson, Katherine and Kwach, James and Lober, William B.} } @article {1205, title = {Open-{Source} {Electronic} {Health} {Record} {Systems} for {Low}-{Resource} {Settings}: {Systematic} {Review}}, journal = {JMIR Medical Informatics}, volume = {5}, year = {2017}, abstract = {Background Despite the great impact of information and communication technologies on clinical practice and on the quality of health services, this trend has been almost exclusive to developed countries, whereas countries with poor resources suffer from many economic and social issues that have hindered the real benefits of electronic health (eHealth) tools. 
As a component of eHealth systems, electronic health records (EHRs) play a fundamental role in patient management and effective medical care services. Thus, the adoption of EHRs in regions with a lack of infrastructure, untrained staff, and ill-equipped health care providers is an important task. However, the main barrier to adopting EHR software in low- and middle-income countries is the cost of its purchase and maintenance, which highlights the open-source approach as a good solution for these underserved areas. Objective The aim of this study was to conduct a systematic review of open-source EHR systems based on the requirements and limitations of low-resource settings. Methods First, we reviewed existing literature on the comparison of available open-source solutions. In close collaboration with the University of Gondar Hospital, Ethiopia, we identified common limitations in poor resource environments and also the main requirements that EHRs should support. Then, we extensively evaluated the current open-source EHR solutions, discussing their strengths and weaknesses, and their appropriateness to fulfill a predefined set of features relevant for low-resource settings. Results The evaluation methodology allowed assessment of several key aspects of available solutions that are as follows: (1) integrated applications, (2) configurable reports, (3) custom reports, (4) custom forms, (5) interoperability, (6) coding systems, (7) authentication methods, (8) patient portal, (9) access control model, (10) cryptographic features, (11) flexible data model, (12) offline support, (13) native client, (14) Web client,(15) other clients, (16) code-based language, (17) development activity, (18) modularity, (19) user interface, (20) community support, and (21) customization. The quality of each feature is discussed for each of the evaluated solutions and a final comparison is presented. Conclusions There is a clear demand for open-source, reliable, and flexible EHR systems in low-resource settings. In this study, we have evaluated and compared five open-source EHR systems following a multidimensional methodology that can provide informed recommendations to other implementers, developers, and health care professionals. We hope that the results of this comparison can guide decision making when needing to adopt, install, and maintain an open-source EHR solution in low-resource settings.}, issn = {2291-9694}, doi = {10.2196/medinform.8131}, url = {https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5703976/}, author = {Syzdykova, Assel and Malta, Andr{\'e} and Zolfo, Maria and Diro, Ermias and Oliveira, Jos{\'e} Lu{\'\i}s} } @article {1202, title = {Open-source mobile digital platform for clinical trial data collection in low-resource settings}, journal = {BMJ innovations}, volume = {3}, year = {2017}, pages = {26{\textendash}31}, abstract = {BACKGROUND: Governments, universities and pan-African research networks are building durable infrastructure and capabilities for biomedical research in Africa. This offers the opportunity to adopt from the outset innovative approaches and technologies that would be challenging to retrofit into fully established research infrastructures such as those regularly found in high-income countries. In this context we piloted the use of a novel mobile digital health platform, designed specifically for low-resource environments, to support high-quality data collection in a clinical research study. 
OBJECTIVE: Our primary aim was to assess the feasibility of using a mobile digital platform for clinical trial data collection in a low-resource setting. Secondarily, we sought to explore the potential benefits of such an approach. METHODS: The investigative site was a research institute in Nairobi, Kenya. We integrated an open-source platform for mobile data collection commonly used in the developing world with an open-source, standard platform for electronic data capture in clinical trials. The integration was developed using common data standards (Clinical Data Interchange Standards Consortium (CDISC) Operational Data Model), maximising the potential to extend the approach to other platforms. The system was deployed in a pharmacokinetic study involving healthy human volunteers. RESULTS: The electronic data collection platform successfully supported conduct of the study. Multidisciplinary users reported high levels of satisfaction with the mobile application and highlighted substantial advantages when compared with traditional paper record systems. The new system also demonstrated a potential for expediting data quality review. DISCUSSION AND CONCLUSIONS: This pilot study demonstrated the feasibility of using a mobile digital platform for clinical research data collection in low-resource settings. Sustainable scientific capabilities and infrastructure are essential to attract and support clinical research studies. Since many research structures in Africa are being developed anew, stakeholders should consider implementing innovative technologies and approaches.}, keywords = {clinical research, eSource, Global Health, mHealth, Reverse Innovations}, issn = {2055-642X}, doi = {10.1136/bmjinnov-2016-000164}, author = {van Dam, Joris and Omondi Onyango, Kevin and Midamba, Brian and Groosman, Nele and Hooper, Norman and Spector, Jonathan and Pillai, Goonaseelan Colin and Ogutu, Bernhards} } @article {1189, title = {Orchestrating differential data access for translational research: a pilot implementation}, journal = {BMC Medical Informatics and Decision Making}, volume = {17}, year = {2017}, month = {2017}, pages = {30}, abstract = {BACKGROUND: Translational researchers need robust IT solutions to access a range of data types, varying from public data sets to pseudonymised patient information with restricted access, provided on a case-by-case basis. The reason for this complication is that managing access policies to sensitive human data must consider issues of data confidentiality, identifiability, extent of consent, and data usage agreements. All these ethical, social and legal aspects must be incorporated into a differential management of restricted access to sensitive data. METHODS: In this paper we present a pilot system that uses several common open source software components in a novel combination to coordinate access to heterogeneous biomedical data repositories containing open data (open access) as well as sensitive data (restricted access) in the domain of biobanking and biosample research. Our approach is based on a digital identity federation and software to manage resource access entitlements. RESULTS: Open source software components were assembled and configured in such a way that they allow for different ways of restricted access according to the protection needs of the data. We have tested the resulting pilot infrastructure and assessed its performance, feasibility and reproducibility.
CONCLUSIONS: Common open source software components are sufficient to allow for the creation of a secure system for differential access to sensitive data. The implementation of this system is exemplary for researchers facing similar requirements for restricted access data. Here we report experience and lessons learnt of our pilot implementation, which may be useful for similar use cases. Furthermore, we discuss possible extensions for more complex scenarios.}, isbn = {1472-6947}, doi = {10.1186/s12911-017-0424-6}, url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC5363029/}, author = {Brandizi, Marco and Melnichuk, Olga and Bild, Raffael and Kohlmayer, Florian and Rodriguez-Castro, Benedicto and Spengler, Helmut and Kuhn, Klaus A and Kuchinke, Wolfgang and Ohmann, Christian and Mustonen, Timo and Linden, Mikael and Nyr{\"o}nen, Tommi and Lappalainen, Ilkka and Brazma, Alvis and Sarkans, Ugis} } @article {1300, title = {QuPath: Open source software for digital pathology image analysis.}, journal = {Sci Rep}, volume = {7}, year = {2017}, month = {2017 12 04}, pages = {16878}, abstract = {

QuPath is new bioimage analysis software designed to meet the growing need for a user-friendly, extensible, open-source solution for digital pathology and whole slide image analysis. In addition to offering a comprehensive panel of tumor identification and high-throughput biomarker evaluation tools, QuPath provides researchers with powerful batch-processing and scripting functionality, and an extensible platform with which to develop and share new algorithms to analyze complex tissue images. Furthermore, QuPath{\textquoteright}s flexible design makes it suitable for a wide range of additional image analysis applications across biomedical research.

}, keywords = {Algorithms, Biomarkers, Tumor, Colonic Neoplasms, Humans, Image Interpretation, Computer-Assisted, Kaplan-Meier Estimate, Programmed Cell Death 1 Ligand 2 Protein, User-Computer Interface}, issn = {2045-2322}, doi = {10.1038/s41598-017-17204-5}, author = {Bankhead, Peter and Loughrey, Maurice B and Fern{\'a}ndez, Jos{\'e} A and Dombrowski, Yvonne and McArt, Darragh G and Dunne, Philip D and McQuaid, Stephen and Gray, Ronan T and Murray, Liam J and Coleman, Helen G and James, Jacqueline A and Salto-Tellez, Manuel and Hamilton, Peter W} } @article {1284, title = {Risk of Posterior Capsule Rupture During Cataract Surgery in Eyes With Previous Intravitreal Injections.}, journal = {Am J Ophthalmol}, volume = {177}, year = {2017}, month = {2017 May}, pages = {77-80}, abstract = {

PURPOSE: To investigate the risk of posterior capsular rupture (PCR) during cataract surgery in eyes with previous intravitreal injection (IVI).

DESIGN: Retrospective cohort study.

METHODS: The Moorfields Patient Administrative System and OpenEyes electronic databases were used to study all cataract surgery procedures undertaken between January 1, 2012 and August 31, 2015 in the Moorfields main and satellite sites. Clinical data were anonymized and extracted, including prior occurrence and number of intravitreal injections. Logistic regression was performed with the Hosmer-Lemeshow test for goodness of fit to generate odds ratios for possible risk factors.

RESULTS: In total, 62 994 cataract surgery procedures were undertaken over the study period, of which 1035 (1.64\%) were in eyes with previous intravitreal injection(s). PCR occurred in 650 (1.04\%) eyes. After logistic regression, prior intravitreal injection was associated with an increased risk of PCR (P~= .037), with an odds ratio of 1.66. The number of prior injections, indication for injections, and service undertaking the surgery were not associated with increased risk of PCR (P > .1).

CONCLUSIONS: Eyes with previous IVI have a higher risk of PCR. This is not affected by number of previous injections, indication for injections, or the specialty undertaking the surgery.

}, keywords = {Aged, Cataract Extraction, Female, Follow-Up Studies, Glucocorticoids, Humans, Incidence, Intraoperative Complications, Intravitreal Injections, Lens Capsule, Crystalline, Male, Posterior Capsular Rupture, Ocular, Retrospective Studies, Risk Factors, United Kingdom, Visual Acuity}, issn = {1879-1891}, doi = {10.1016/j.ajo.2017.02.006}, author = {Shalchi, Zaid and Okada, Mali and Whiting, Chris and Hamilton, Robin} } @article {1237, title = {Automating indicator data reporting from health facility EMR to a national aggregate data system in Kenya: An Interoperability field-test using OpenMRS and DHIS2.}, journal = {Online J Public Health Inform}, volume = {8}, year = {2016}, month = {2016}, pages = {e188}, abstract = {

Developing countries are increasingly strengthening national health information systems (HIS) for evidence-based decision-making. However, the inability to report indicator data automatically from electronic medical record systems (EMR) hinders this process. Data are often printed and manually re-entered into aggregate reporting systems. This affects data completeness, accuracy, and reporting timeliness, and burdens the staff who support routine indicator reporting from patient-level data. After conducting a feasibility test to exchange indicator data from the Open Medical Records System (OpenMRS) to the District Health Information System version 2 (DHIS2), we conducted a field test at a health facility in Kenya. We configured a field-test DHIS2 instance, similar to the Kenya Ministry of Health (MOH) DHIS2, to receive HIV care and treatment indicator data, and the KenyaEMR, a customized version of OpenMRS, to generate and transmit the data from a health facility. After training facility staff in sending data using the DHIS2 reporting module, we compared the completeness, accuracy, and timeliness of automated indicator reporting with facility monthly reports manually entered into the MOH DHIS2. All 45 data values in the automated reporting process were 100\% complete and accurate, whereas in the manual entry process data completeness ranged from 66.7\% to 100\% and accuracy ranged from 33.3\% to 95.6\% over seven months (July 2013-January 2014). The manual tally and entry process required at least one person to perform each of five reporting activities; generating data from the EMR followed by manual entry required at least one person to perform each of three reporting activities; the automated reporting process involved a single activity performed by one person. Manual tally and entry observed in October 2013 took 375 minutes. The average time to generate data and manually enter it into DHIS2 was over half an hour (M=32.35 mins, SD=0.29), compared to less than a minute for automated submission (M=0.19 mins, SD=0.15). The results indicate that sending indicator data electronically from an OpenMRS-based EMR at a health facility to DHIS2 improves data completeness, eliminates transcription errors and reporting delays, and reduces the reporting burden on human resources. This increases the availability of quality indicator data, using existing resources, to facilitate monitoring of service delivery and measuring progress towards set goals.

}, issn = {1947-2579}, doi = {10.5210/ojphi.v8i2.6722}, author = {Kariuki, James M and Manders, Eric-Jan and Richards, Janise and Oluoch, Tom and Kimanga, Davies and Wanyee, Steve and Kwach, James O and Santas, Xenophon} } @article {1288, title = {Collaborative analysis of multi-gigapixel imaging data using Cytomine.}, journal = {Bioinformatics}, volume = {32}, year = {2016}, month = {2016 05 01}, pages = {1395-401}, abstract = {

MOTIVATION: Collaborative analysis of massive imaging datasets is essential to enable scientific discoveries.

RESULTS: We developed Cytomine to foster active and distributed collaboration of multidisciplinary teams for large-scale image-based studies. It uses web development methodologies and machine learning in order to readily organize, explore, share and analyze (semantically and quantitatively) multi-gigapixel imaging data over the internet. We illustrate how it has been used in several biomedical applications.

AVAILABILITY AND IMPLEMENTATION: Cytomine (http://www.cytomine.be/) is freely available under an open-source license from http://github.com/cytomine/. A documentation wiki (http://doc.cytomine.be) and a demo server (http://demo.cytomine.be) are also available.

CONTACT: info@cytomine.be

SUPPLEMENTARY INFORMATION: Supplementary data are available at Bioinformatics online.

}, keywords = {Image Interpretation, Computer-Assisted, Internet, Software, Statistics as Topic}, issn = {1367-4811}, doi = {10.1093/bioinformatics/btw013}, author = {Mar{\'e}e, Rapha{\"e}l and Rollus, Lo{\"\i}c and St{\'e}vens, Benjamin and Hoyoux, Renaud and Louppe, Gilles and Vandaele, R{\'e}my and Begon, Jean-Michel and Kainz, Philipp and Geurts, Pierre and Wehenkel, Louis} } @article {1167, title = {{ConoSurf}: {Open}-source 3D scanning system based on a conoscopic holography device for acquiring surgical surfaces}, journal = {The international journal of medical robotics + computer assisted surgery: MRCAS}, year = {2016}, abstract = {BACKGROUND: A difficulty in computer-assisted interventions is acquiring the patient{\textquoteright}s anatomy intraoperatively. Standard modalities have several limitations: low image quality (ultrasound), radiation exposure (computed tomography) or high costs (magnetic resonance imaging). An alternative approach uses a tracked pointer; however, the pointer causes tissue deformation and requires sterilizing. Recent proposals, utilizing a tracked conoscopic holography device, have shown promising results without the previously mentioned drawbacks. METHODS: We have developed an open-source software system that enables real-time surface scanning using a conoscopic holography device and a wide variety of tracking systems, integrated into pre-existing and well-supported software solutions. RESULTS: The mean target registration error of point measurements was 1.46~mm. For a quick guidance scan, surface reconstruction improved the surface registration error compared with point-set registration. CONCLUSIONS: We have presented a system enabling real-time surface scanning using a tracked conoscopic holography device. Results show that it can be useful for acquiring the patient{\textquoteright}s anatomy during surgery.}, issn = {1478-596X}, doi = {10.1002/rcs.1788}, author = {Brudfors, Mikael and Garc{\'\i}a-V{\'a}zquez, Ver{\'o}nica and Ses{\'e}-Lucio, Bego{\~n}a and Marinetto, Eugenio and Desco, Manuel and Pascau, Javier} } @article {1127, title = {Evaluating Open-Source Full-Text Search Engines for Matching ICD-10 Codes.}, journal = {Stud Health Technol Inform}, volume = {226}, year = {2016}, month = {2016}, pages = {127-30}, abstract = {

This research presents the results of evaluating multiple free, open-source engines on matching ICD-10 diagnostic codes via full-text searches. The study investigates what it takes to get an accurate match when searching for a specific diagnostic code. For each code, the evaluation starts by extracting the words that make up its text and continues with building full-text search queries from the combinations of these words. The queries are then run against all the ICD-10 codes until a query returns the code in question as the match with the highest relative score. This method identifies the minimum number of words that must be provided in order for the search engines to choose the desired entry. The engines analyzed include a popular Java-based full-text search engine, a lightweight engine written in JavaScript which can even execute in the user{\textquoteright}s browser, and two popular open-source relational database management systems.

}, issn = {0926-9630}, author = {Jurc{\u a}u, Daniel-Alexandru and Stoicu-Tivadar, Vasile} } @article {1137, title = {How open science helps researchers succeed.}, journal = {Elife}, volume = {5}, year = {2016}, month = {2016}, abstract = {

Open access, open data, open source and other open scholarship practices are growing in popularity and necessity. However, widespread adoption of these practices has not yet been achieved. One reason is that researchers are uncertain about how sharing their work will affect their careers. We review literature demonstrating that open research is associated with increases in citations, media attention, potential collaborators, job opportunities and funding opportunities. These findings are evidence that open research practices bring significant benefits to researchers relative to more traditional closed practices.

}, issn = {2050-084X}, doi = {10.7554/eLife.16800}, author = {McKiernan, Erin C and Bourne, Philip E and Brown, C Titus and Buck, Stuart and Kenall, Amye and Lin, Jennifer and McDougall, Damon and Nosek, Brian A and Ram, Karthik and Soderberg, Courtney K and Spies, Jeffrey R and Thaney, Kaitlin and Updegrove, Andrew and Woo, Kara H and Yarkoni, Tal} } @article {1131, title = {Increasing the impact of medical image computing using community-based open-access hackathons: The NA-MIC and 3D Slicer experience.}, journal = {Med Image Anal}, year = {2016}, month = {2016 Jul 7}, abstract = {

The National Alliance for Medical Image Computing (NA-MIC) was launched in 2004 with the goal of investigating and developing an open source software infrastructure for the extraction of information and knowledge from medical images using computational methods. Several leading research and engineering groups participated in this effort that was funded by the US National Institutes of Health through a variety of infrastructure grants. This effort transformed 3D Slicer from an internal, Boston-based, academic research software application into a professionally maintained, robust, open source platform with an international leadership and developer and user communities. Critical improvements to the widely used underlying open source libraries and tools (VTK, ITK, CMake, CDash, DCMTK) were an additional consequence of this effort. This project has contributed to close to a thousand peer-reviewed publications and a growing portfolio of US and international funded efforts expanding the use of these tools in new medical computing applications every year. In this editorial, we discuss what we believe are gaps in the way medical image computing is pursued today; how a well-executed research platform can enable discovery, innovation and reproducible science ("Open Science"); and how our quest to build such a software platform has evolved into a productive and rewarding social engineering exercise in building an open-access community with a shared vision.

}, issn = {1361-8423}, doi = {10.1016/j.media.2016.06.035}, author = {Kapur, Tina and Pieper, Steve and Fedorov, Andriy and Fillion-Robin, J-C and Halle, Michael and O{\textquoteright}Donnell, Lauren and Lasso, Andras and Ungi, Tamas and Pinter, Csaba and Finet, Julien and Pujol, Sonia and Jagadeesan, Jayender and Tokuda, Junichi and Norton, Isaiah and Estepar, Raul San Jose and Gering, David and Aerts, Hugo J W L and Jakab, Marianna and Hata, Nobuhiko and Ibanez, Luiz and Blezek, Daniel and Miller, Jim and Aylward, Stephen and Grimson, W Eric L and Fichtinger, Gabor and Wells, William M and Lorensen, William E and Schroeder, Will and Kikinis, Ron} } @article {1151, title = {A long journey to short abbreviations: developing an open-source framework for clinical abbreviation recognition and disambiguation (CARD).}, journal = {J Am Med Inform Assoc}, year = {2016}, month = {2016 Aug 18}, abstract = {

OBJECTIVE: The goal of this study was to develop a practical framework for recognizing and disambiguating clinical abbreviations, thereby improving current clinical natural language processing (NLP) systems{\textquoteright} capability to handle abbreviations in clinical narratives.

METHODS: We developed an open-source framework for clinical abbreviation recognition and disambiguation (CARD) that leverages our previously developed methods, including: (1) machine learning based approaches to recognize abbreviations from a clinical corpus, (2) clustering-based semiautomated methods to generate possible senses of abbreviations, and (3) profile-based word sense disambiguation methods for clinical abbreviations. We applied CARD to clinical corpora from Vanderbilt University Medical Center (VUMC) and generated 2 comprehensive sense inventories for abbreviations in discharge summaries and clinic visit notes. Furthermore, we developed a wrapper that integrates CARD with MetaMap, a widely used general clinical NLP system. RESULTS AND CONCLUSION: CARD detected 27 317 and 107 303 distinct abbreviations from discharge summaries and clinic visit notes, respectively. Two sense inventories were constructed for the 1000 most frequent abbreviations in these 2 corpora. Using the sense inventories created from discharge summaries, CARD achieved an F1 score of 0.755 for identifying and disambiguating all abbreviations in a corpus from the VUMC discharge summaries, which is superior to MetaMap and Apache{\textquoteright}s clinical Text Analysis Knowledge Extraction System (cTAKES). Using additional external corpora, we also demonstrated that the MetaMap-CARD wrapper improved MetaMap{\textquoteright}s performance in recognizing disorder entities in clinical notes. The CARD framework, 2 sense inventories, and the wrapper for MetaMap are publicly available at https://sbmi.uth.edu/ccb/resources/abbreviation.htm. We believe the CARD framework can be a valuable resource for improving abbreviation identification in clinical NLP systems.

}, issn = {1527-974X}, doi = {10.1093/jamia/ocw109}, author = {Wu, Yonghui and Denny, Joshua C and Rosenbloom, S Trent and Miller, Randolph A and Giuse, Dario A and Wang, Lulu and Blanquicett, Carmelo and Soysal, Ergin and Xu, Jun and Xu, Hua} } @article {1170, title = {Open {Source} {Drug} {Discovery}: {Highly} {Potent} {Antimalarial} {Compounds} {Derived} from the {Tres} {Cantos} {Arylpyrroles}}, journal = {ACS central science}, volume = {2}, year = {2016}, pages = {687{\textendash}701}, abstract = {The development of new antimalarial compounds remains a pivotal part of the strategy for malaria elimination. Recent large-scale phenotypic screens have provided a wealth of potential starting points for hit-to-lead campaigns. One such public set is explored, employing an open source research mechanism in which all data and ideas were shared in real time, anyone was able to participate, and patents were not sought. One chemical subseries was found to exhibit oral activity but contained a labile ester that could not be replaced without loss of activity, and the original hit exhibited remarkable sensitivity to minor structural change. A second subseries displayed high potency, including activity within gametocyte and liver stage assays, but at the cost of low solubility. As an open source research project, unexplored avenues are clearly identified and may be explored further by the community; new findings may be cumulatively added to the present work.}, issn = {2374-7943}, doi = {10.1021/acscentsci.6b00086}, author = {Williamson, Alice E. and Ylioja, Paul M. and Robertson, Murray N. and Antonova-Koch, Yevgeniya and Avery, Vicky and Baell, Jonathan B. and Batchu, Harikrishna and Batra, Sanjay and Burrows, Jeremy N. and Bhattacharyya, Soumya and Calderon, Felix and Charman, Susan A. and Clark, Julie and Crespo, Benigno and Dean, Matin and Debbert, Stefan L. and Delves, Michael and Dennis, Adelaide S. M. and Deroose, Frederik and Duffy, Sandra and Fletcher, Sabine and Giaever, Guri and Hallyburton, Irene and Gamo, Francisco-Javier and Gebbia, Marinella and Guy, R. Kiplin and Hungerford, Zoe and Kirk, Kiaran and Lafuente-Monasterio, Maria J. and Lee, Anna and Meister, Stephan and Nislow, Corey and Overington, John P. and Papadatos, George and Patiny, Luc and Pham, James and Ralph, Stuart A. and Ruecker, Andrea and Ryan, Eileen and Southan, Christopher and Srivastava, Kumkum and Swain, Chris and Tarnowski, Matthew J. and Thomson, Patrick and Turner, Peter and Wallace, Iain M. and Wells, Timothy N. C. and White, Karen and White, Laura and Willis, Paul and Winzeler, Elizabeth A. and Wittlin, Sergio and Todd, Matthew H.} } @article {1121, title = {Open Source Drug Discovery with the Malaria Box Compound Collection for Neglected Diseases and Beyond.}, journal = {PLoS Pathog}, volume = {12}, year = {2016}, month = {2016 Jul}, pages = {e1005763}, abstract = {

A major cause of the paucity of new starting points for drug discovery is the lack of interaction between academia and industry. Much of the global resource in biology is present in universities, whereas the focus of medicinal chemistry is still largely within industry. Open source drug discovery, with sharing of information, is clearly a first step towards overcoming this gap. But the interface could especially be bridged through a scale-up of open sharing of physical compounds, which would accelerate the finding of new starting points for drug discovery. The Medicines for Malaria Venture Malaria Box is a collection of over 400 compounds representing families of structures identified in phenotypic screens of pharmaceutical and academic libraries against the Plasmodium falciparum malaria parasite. The set has now been distributed to almost 200 research groups globally in the last two years, with the only stipulation that information from the screens is deposited in the public domain. This paper reports for the first time on 236 screens that have been carried out against the Malaria Box and compares these results with 55 assays that were previously published, in a format that allows a meta-analysis of the combined dataset. The combined biochemical and cellular assays presented here suggest mechanisms of action for 135 (34\%) of the compounds active in killing multiple life-cycle stages of the malaria parasite, including asexual blood, liver, gametocyte, gametes and insect ookinete stages. In addition, many compounds demonstrated activity against other pathogens, showing hits in assays with 16 protozoa, 7 helminths, 9 bacterial and mycobacterial species, the dengue fever mosquito vector, and the NCI60 human cancer cell line panel of 60 human tumor cell lines. Toxicological, pharmacokinetic and metabolic properties were collected on all the compounds, assisting in the selection of the most promising candidates for murine proof-of-concept experiments and medicinal chemistry programs. The data for all of these assays are presented and analyzed to show how outstanding leads for many indications can be selected. These results reveal the immense potential for translating the dispersed expertise in biological assays involving human pathogens into drug discovery starting points, by providing open access to new families of molecules, and emphasize how a small additional investment made to help acquire and distribute compounds, and sharing the data, can catalyze drug discovery for dozens of different indications. Another lesson is that when multiple screens from different groups are run on the same library, results can be integrated quickly to select the most valuable starting points for subsequent medicinal chemistry efforts.

}, issn = {1553-7374}, doi = {10.1371/journal.ppat.1005763}, author = {Van Voorhis, Wesley C and Adams, John H and Adelfio, Roberto and Ahyong, Vida and Akabas, Myles H and Alano, Pietro and Alday, Aintzane and Alem{\'a}n Resto, Yesmalie and Alsibaee, Aishah and Alzualde, Ainhoa and Andrews, Katherine T and Avery, Simon V and Avery, Vicky M and Ayong, Lawrence and Baker, Mark and Baker, Stephen and Ben Mamoun, Choukri and Bhatia, Sangeeta and Bickle, Quentin and Bounaadja, Lotfi and Bowling, Tana and Bosch, J{\"u}rgen and Boucher, Lauren E and Boyom, Fabrice F and Brea, Jose and Brennan, Marian and Burton, Audrey and Caffrey, Conor R and Camarda, Grazia and Carrasquilla, Manuela and Carter, Dee and Belen Cassera, Maria and Chih-Chien Cheng, Ken and Chindaudomsate, Worathad and Chubb, Anthony and Colon, Beatrice L and Col{\'o}n-L{\'o}pez, Daisy D and Corbett, Yolanda and Crowther, Gregory J and Cowan, Noemi and D{\textquoteright}Alessandro, Sarah and Le Dang, Na and Delves, Michael and DeRisi, Joseph L and Du, Alan Y and Duffy, Sandra and Abd El-Salam El-Sayed, Shimaa and Ferdig, Michael T and Fern{\'a}ndez Robledo, Jos{\'e} A and Fidock, David A and Florent, Isabelle and Fokou, Patrick V T and Galstian, Ani and Gamo, Francisco Javier and Gokool, Suzanne and Gold, Ben and Golub, Todd and Goldgof, Gregory M and Guha, Rajarshi and Guiguemde, W Armand and Gural, Nil and Guy, R Kiplin and Hansen, Michael A E and Hanson, Kirsten K and Hemphill, Andrew and Hooft van Huijsduijnen, Rob and Horii, Takaaki and Horrocks, Paul and Hughes, Tyler B and Huston, Christopher and Igarashi, Ikuo and Ingram-Sieber, Katrin and Itoe, Maurice A and Jadhav, Ajit and Naranuntarat Jensen, Amornrat and Jensen, Laran T and Jiang, Rays H Y and Kaiser, Annette and Keiser, Jennifer and Ketas, Thomas and Kicka, Sebastien and Kim, Sunyoung and Kirk, Kiaran and Kumar, Vidya P and Kyle, Dennis E and Lafuente, Maria Jose and Landfear, Scott and Lee, Nathan and Lee, Sukjun and Lehane, Adele M and Li, Fengwu and Little, David and Liu, Liqiong and Llin{\'a}s, Manuel and Loza, Maria I and Lubar, Aristea and Lucantoni, Leonardo and Lucet, Isabelle and Maes, Louis and Mancama, Dalu and Mansour, Nuha R and March, Sandra and McGowan, Sheena and Medina Vera, Iset and Meister, Stephan and Mercer, Luke and Mestres, Jordi and Mfopa, Alvine N and Misra, Raj N and Moon, Seunghyun and Moore, John P and Morais Rodrigues da Costa, Francielly and M{\"u}ller, Joachim and Muriana, Arantza and Nakazawa Hewitt, Stephen and Nare, Bakela and Nathan, Carl and Narraidoo, Nathalie and Nawaratna, Sujeevi and Ojo, Kayode K and Ortiz, Diana and Panic, Gordana and Papadatos, George and Parapini, Silvia and Patra, Kailash and Pham, Ngoc and Prats, Sarah and Plouffe, David M and Poulsen, Sally-Ann and Pradhan, Anupam and Quevedo, Celia and Quinn, Ronald J and Rice, Christopher A and Abdo Rizk, Mohamed and Ruecker, Andrea and St Onge, Robert and Salgado Ferreira, Rafaela and Samra, Jasmeet and Robinett, Natalie G and Schlecht, Ulrich and Schmitt, Marjorie and Silva Villela, Filipe and Silvestrini, Francesco and Sinden, Robert and Smith, Dennis A and Soldati, Thierry and Spitzm{\"u}ller, Andreas and Stamm, Serge Maximilian and Sullivan, David J and Sullivan, William and Suresh, Sundari and Suzuki, Brian M and Suzuki, Yo and Swamidass, S Joshua and Taramelli, Donatella and Tchokouaha, Lauve R Y and Theron, Anjo and Thomas, David and Tonissen, Kathryn F and Townson, Simon and Tripathi, Abhai K and Trofimov, Valentin and Udenze, Kenneth O and Ullah, Imran and 
Vallieres, Cindy and Vigil, Edgar and Vinetz, Joseph M and Voong Vinh, Phat and Vu, Hoan and Watanabe, Nao-Aki and Weatherby, Kate and White, Pamela M and Wilks, Andrew F and Winzeler, Elizabeth A and Wojcik, Edward and Wree, Melanie and Wu, Wesley and Yokoyama, Naoaki and Zollo, Paul H A and Abla, Nada and Blasco, Benjamin and Burrows, Jeremy and Laleu, Beno{\^\i}t and Leroy, Didier and Spangenberg, Thomas and Wells, Timothy and Willis, Paul A} } @article {1153, title = {Performing Quantitative Imaging Acquisition, Analysis and Visualization Using the Best of Open Source and Commercial Software Solutions.}, journal = {Microsc Microanal}, volume = {22}, year = {2016}, month = {2016 Jul}, pages = {2064-2065}, abstract = {

A challenge in any imaging laboratory, especially one that uses modern techniques, is to achieve a sustainable and productive balance between using open source and commercial software to perform quantitative image acquisition, analysis and visualization. In addition to considering the expense of software licensing, one must consider factors such as the quality and usefulness of the software{\textquoteright}s support, training and documentation. One must also consider the reproducibility with which multiple people generate results using the same software to perform the same analysis, how one may distribute their methods to the community using the software, and the potential for achieving automation to improve productivity.

}, issn = {1435-8115}, author = {Shenoy, Shailesh M} } @article {1163, title = {{RABIX}: {AN} {OPEN}-{SOURCE} {WORKFLOW} {EXECUTOR} {SUPPORTING} {RECOMPUTABILITY} {AND} {INTEROPERABILITY} {OF} {WORKFLOW} {DESCRIPTIONS}}, journal = {Pacific Symposium on Biocomputing. Pacific Symposium on Biocomputing}, volume = {22}, year = {2016}, pages = {154{\textendash}165}, abstract = {As biomedical data has become increasingly easy to generate in large quantities, the methods used to analyze it have proliferated rapidly. Reproducible and reusable methods are required to learn from large volumes of data reliably. To address this issue, numerous groups have developed workflow specifications or execution engines, which provide a framework with which to perform a sequence of analyses. One such specification is the Common Workflow Language, an emerging standard which provides a robust and flexible framework for describing data analysis tools and workflows. In addition, reproducibility can be furthered by executors or workflow engines which interpret the specification and enable additional features, such as error logging, file organization, optimizations to computation and job scheduling, and allow for easy computing on large volumes of data. To this end, we have developed the Rabix Executor, an open-source workflow engine for the purposes of improving reproducibility through reusability and interoperability of workflow descriptions.}, issn = {2335-6936}, author = {Kaushik, Gaurav and Ivkovic, Sinisa and Simonovic, Janko and Tijanic, Nebojsa and Davis-Dusenbery, Brandi and Kural, Deniz} } @article {1161, title = {Reliability of infarct volumetry: {Its} relevance and the improvement by a software-assisted approach}, journal = {Journal of Cerebral Blood Flow and Metabolism: Official Journal of the International Society of Cerebral Blood Flow and Metabolism}, year = {2016}, abstract = {Despite the efficacy of neuroprotective approaches in animal models of stroke, their translation has so far failed from bench to bedside. One reason is presumed to be a low quality of preclinical study design, leading to bias and a low a priori power. In this study, we propose that the key read-out of experimental stroke studies, the volume of the ischemic damage as commonly measured by free-handed planimetry of TTC-stained brain sections, is subject to an unrecognized low inter-rater and test-retest reliability with strong implications for statistical power and bias. As an alternative approach, we suggest a simple, open-source, software-assisted method, taking advantage of automatic-thresholding techniques. The validity and the improvement of reliability by an automated method for tMCAO infarct volumetry are demonstrated. In addition, we show the probable consequences of increased reliability for precision, p-values, effect inflation, and power calculation, exemplified by a systematic analysis of experimental stroke studies published in the year 2015. 
Our study reveals an underappreciated quality problem in translational stroke research and suggests that software-assisted infarct volumetry might help to improve reproducibility and therefore the robustness of bench to bedside translation.}, keywords = {experimental stroke, Image analysis, middle cerebral artery occlusion, Neuroprotection, power}, issn = {1559-7016}, doi = {10.1177/0271678X16681311}, author = {Friedl{\"a}nder, Felix and Bohmann, Ferdinand and Brunkhorst, Max and Chae, Ju-Hee and Devraj, Kavi and K{\"o}hler, Yvette and Kraft, Peter and Kuhn, Hannah and Lucaciu, Alexandra and Luger, Sebastian and Pfeilschifter, Waltraud and Sadler, Rebecca and Liesz, Arthur and Scholtyschik, Karolina and Stolz, Leonie and Vutukuri, Rajkumar and Brunkhorst, Robert} } @article {1156, title = {{SimVascular}: {An} {Open} {Source} {Pipeline} for {Cardiovascular} {Simulation}}, journal = {Annals of Biomedical Engineering}, year = {2016}, abstract = {Patient-specific cardiovascular simulation has become a paradigm in cardiovascular research and is emerging as a powerful tool in basic, translational and clinical research. In this paper we discuss the recent development of a fully open-source SimVascular software package, which provides a complete pipeline from medical image data segmentation to patient-specific blood flow simulation and analysis. This package serves as a research tool for cardiovascular modeling and simulation, and has contributed to numerous advances in personalized medicine, surgical planning and medical device design. The SimVascular software has recently been refactored and expanded to enhance functionality, usability, efficiency and accuracy of image-based patient-specific modeling tools. Moreover, SimVascular previously required several licensed components that hindered new user adoption and code management and our recent developments have replaced these commercial components to create a fully open source pipeline. These developments foster advances in cardiovascular modeling research, increased collaboration, standardization of methods, and a growing developer community.}, keywords = {Hemodynamics, Image-based CFD, open-source, Patient-specific modeling}, issn = {1573-9686}, doi = {10.1007/s10439-016-1762-8}, author = {Updegrove, Adam and Wilson, Nathan M. and Merkow, Jameson and Lan, Hongzhi and Marsden, Alison L. and Shadden, Shawn C.} } @article {1159, title = {{SIproc}: an open-source biomedical data processing platform for large hyperspectral images}, journal = {The Analyst}, year = {2016}, abstract = {There has recently been significant interest within the vibrational spectroscopy community to apply quantitative spectroscopic imaging techniques to histology and clinical diagnosis. However, many of the proposed methods require collecting spectroscopic images that have a similar region size and resolution to the corresponding histological images. Since spectroscopic images contain significantly more spectral samples than traditional histology, the resulting data sets can approach hundreds of gigabytes to terabytes in size. This makes them difficult to store and process, and the tools available to researchers for handling large spectroscopic data sets are limited. Fundamental mathematical tools, such as MATLAB, Octave, and SciPy, are extremely powerful but require that the data be stored in fast memory. This memory limitation becomes impractical for even modestly sized histological images, which can be hundreds of gigabytes in size. 
In this paper, we propose an open-source toolkit designed to perform out-of-core processing of hyperspectral images. By taking advantage of graphical processing unit (GPU) computing combined with adaptive data streaming, our software alleviates common workstation memory limitations while achieving better performance than existing applications.}, issn = {1364-5528}, doi = {10.1039/c6an02082h}, author = {Berisha, Sebastian and Chang, Shengyuan and Saki, Sam and Daeinejad, Davar and He, Ziqi and Mankar, Rupali and Mayerich, David} } @article {1165, title = {A systematic literature review of open source software quality assessment models}, journal = {SpringerPlus}, volume = {5}, year = {2016}, pages = {1936}, abstract = {BACKGROUND: Many open source software (OSS) quality assessment models are proposed and available in the literature. However, there is little or no adoption of these models in practice. In order to guide the formulation of newer models so they can be acceptable by practitioners, there is need for clear discrimination of the existing models based on their specific properties. Based on this, the aim of this study is to perform a systematic literature review to investigate the properties of the existing OSS quality assessment models by classifying them with respect to their quality characteristics, the methodology they use for assessment, and their domain of application so as to guide the formulation and development of newer models. Searches in IEEE Xplore, ACM, Science Direct, Springer and Google Search is performed so as to retrieve all relevant primary studies in this regard. Journal and conference papers between the year 2003 and 2015 were considered since the first known OSS quality model emerged in 2003. RESULTS: A total of 19 OSS quality assessment model papers were selected. To select these models we have developed assessment criteria to evaluate the quality of the existing studies. Quality assessment models are classified into five categories based on the quality characteristics they possess namely: single-attribute, rounded category, community-only attribute, non-community attribute as well as the non-quality in use models. Our study reflects that software selection based on hierarchical structures is found to be the most popular selection method in the existing OSS quality assessment models. Furthermore, we found that majority (47\%) of the existing models do not specify any domain of application. CONCLUSIONS: In conclusion, our study will be a valuable contribution to the community and helps the quality assessment model developers in formulating newer models and also to the practitioners (software evaluators) in selecting suitable OSS in the midst of alternatives.}, keywords = {Analysis, Community, ISO 25010, open source software, Quality assessment models}, doi = {10.1186/s40064-016-3612-4}, author = {Adewumi, Adewole and Misra, Sanjay and Omoregbe, Nicholas and Crawford, Broderick and Soto, Ricardo} } @article {1112, title = {TACIT: An open-source text analysis, crawling, and interpretation tool.}, journal = {Behav Res Methods}, year = {2016}, month = {2016 Mar 4}, abstract = {

As human activity and interaction increasingly take place online, the digital residues of these activities provide a valuable window into a range of psychological and social processes. A great deal of progress has been made toward utilizing these opportunities; however, the complexity of managing and analyzing the quantities of data currently available has limited both the types of analysis used and the number of researchers able to make use of these data. Although fields such as computer science have developed a range of techniques and methods for handling these difficulties, making use of those tools has often required specialized knowledge and programming experience. The Text Analysis, Crawling, and Interpretation Tool (TACIT) is designed to bridge this gap by providing an intuitive tool and interface for making use of state-of-the-art methods in text analysis and large-scale data management. Furthermore, TACIT is implemented as an open, extensible, plugin-driven architecture, which will allow other researchers to extend and expand these capabilities as new methods become available.

}, issn = {1554-3528}, doi = {10.3758/s13428-016-0722-4}, author = {Dehghani, Morteza and Johnson, Kate M and Garten, Justin and Boghrati, Reihane and Hoover, Joe and Balasubramanian, Vijayan and Singh, Anurag and Shankar, Yuvarani and Pulickal, Linda and Rajkumar, Aswin and Parmar, Niki Jitendra} } @article {1139, title = {VirusMapper: open-source nanoscale mapping of viral architecture through super-resolution microscopy.}, journal = {Sci Rep}, volume = {6}, year = {2016}, month = {2016}, pages = {29132}, abstract = {

The nanoscale molecular assembly of mammalian viruses during their infectious life cycle remains poorly understood. Their small dimensions, generally below the 300 nm diffraction limit of light microscopes, have limited most imaging studies to electron microscopy. The recent development of super-resolution (SR) light microscopy now allows the visualisation of viral structures at resolutions of tens of nanometers. In addition, these techniques provide the added benefit of molecular specific labelling and the capacity to investigate viral structural dynamics using live-cell microscopy. However, there is a lack of robust analytical tools that allow for precise mapping of viral structure within the setting of infection. Here we present an open-source analytical framework that combines super-resolution imaging and na{\"\i}ve single-particle analysis to generate unbiased molecular models. This tool, VirusMapper, is a high-throughput, user-friendly, ImageJ-based software package allowing for automatic statistical mapping of conserved multi-molecular structures, such as viral substructures or intact viruses. We demonstrate the usability of VirusMapper by applying it to SIM and STED images of vaccinia virus in isolation and when engaged with host cells. VirusMapper allows for the generation of accurate, high-content, molecular specific virion models and detection of nanoscale changes in viral architecture.

}, issn = {2045-2322}, doi = {10.1038/srep29132}, author = {Gray, Robert D M and Beerli, Corina and Pereira, Pedro Matos and Scherer, Kathrin Maria and Samolej, Jerzy and Bleck, Christopher Karl Ernst and Mercer, Jason and Henriques, Ricardo} } @article {992, title = {A case study in open source innovation: developing the Tidepool Platform for interoperability in type 1 diabetes management.}, journal = {J Am Med Inform Assoc}, year = {2015}, month = {2015 Sep 2}, abstract = {

OBJECTIVE: Develop a device-agnostic cloud platform to host diabetes device data and catalyze an ecosystem of software innovation for type 1 diabetes (T1D) management.

MATERIALS AND METHODS: An interdisciplinary team decided to establish a nonprofit company, Tidepool, and build open-source software.

RESULTS: Through a user-centered design process, the authors created a software platform, the Tidepool Platform, to upload and host T1D device data in an integrated, device-agnostic fashion, as well as an application ("app"), Blip, to visualize the data. Tidepool{\textquoteright}s software utilizes the principles of modular components, modern web design including REST APIs and JavaScript, cloud computing, agile development methodology, and robust privacy and security.

DISCUSSION: By consolidating the currently scattered and siloed T1D device data ecosystem into one open platform, Tidepool can improve access to the data and enable new possibilities and efficiencies in T1D clinical care and research. The Tidepool Platform decouples diabetes apps from diabetes devices, allowing software developers to build innovative apps without requiring them to design a unique back-end (e.g., database and security) or unique ways of ingesting device data. It allows people with T1D to choose to use any preferred app regardless of which device(s) they use.

CONCLUSION: The authors believe that the Tidepool Platform can solve two current problems in the T1D device landscape: 1) limited access to T1D device data and 2) poor interoperability of data from different devices. If proven effective, Tidepool{\textquoteright}s open source, cloud model for health data interoperability is applicable to other healthcare use cases.

}, issn = {1527-974X}, doi = {10.1093/jamia/ocv104}, author = {Neinstein, Aaron and Wong, Jenise and Look, Howard and Arbiter, Brandon and Quirk, Kent and McCanne, Steve and Sun, Yao and Blum, Michael and Adi, Saleh} } @article {1190, title = {caTissue Suite to OpenSpecimen: developing an extensible, open source, web-based biobanking management system}, journal = {Journal of biomedical informatics}, volume = {57}, year = {2015}, month = {2015/10/}, pages = {456 - 464}, abstract = {The National Cancer Institute (NCI) Cancer Biomedical Informatics Grid({\textregistered}) (caBIG({\textregistered})) program established standards and best practices for biorepository data management by creating an infrastructure to propagate biospecimen resource sharing while maintaining data integrity and security. caTissue Suite, a biospecimen data management software tool, has evolved from this effort. More recently, the caTissue Suite continues to evolve as an open source initiative known as OpenSpecimen. The essential functionality of OpenSpecimen includes the capture and representation of highly granular, hierarchically-structured data for biospecimen processing, quality assurance, tracking, and annotation. Ideal for multi-user and multi-site biorepository environments, OpenSpecimen permits role-based access to specific sets of data operations through a user interface designed to accommodate varying workflows and unique user needs. The software is interoperable, both syntactically and semantically, with an array of other bioinformatics tools, given its integration of standard vocabularies, thus enabling research involving biospecimens. End-users are encouraged to share their day-to-day experiences in working with the application, thus providing the community board with insight into the needs and limitations that need to be addressed. Users are also requested to review and validate new features through group testing environments and mock screens. Through this user interaction, application flexibility and interoperability have been recognized as necessary developmental focuses essential for accommodating diverse adoption scenarios and biobanking workflows to catalyze advances in biomedical research and operations. Given the diversity of biobanking practices and workforce roles, efforts have been made consistently to maintain robust data granularity while aiding user accessibility, data discoverability, and security within and across applications by providing a lower learning curve in using OpenSpecimen. Iterative development and testing cycles provide continuous maintenance and up-to-date capabilities for this freely available, open-access, web-based software application that is globally adopted at over 25 institutions.}, isbn = {1532-0464}, doi = {10.1016/j.jbi.2015.08.020}, url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC4772150/}, author = {McIntosh, Leslie D and Sharma, Mukesh K and Mulvihill, David and Gupta, Snehil and Juehne, Anthony and George, Bijoy and Khot, Suhas B and Kaushal, Atul and Watson, Mark A and Nagarajan, Rakesh} } @article {970, title = {The ImageJ ecosystem: An open platform for biomedical image analysis.}, journal = {Mol Reprod Dev}, year = {2015}, month = {2015 Jul 7}, abstract = {

Technology in microscopy advances rapidly, enabling increasingly affordable, faster, and more precise quantitative biomedical imaging, which necessitates correspondingly more-advanced image processing and analysis techniques. A wide range of software is available, from commercial to academic, special-purpose to Swiss army knife, small to large, but a key characteristic of software that is suitable for scientific inquiry is its accessibility. Open-source software is ideal for scientific endeavors because it can be freely inspected, modified, and redistributed; in particular, the open-software platform ImageJ has had a huge impact on the life sciences, and continues to do so. From its inception, ImageJ has grown significantly due largely to being freely available and its vibrant and helpful user community. Scientists as diverse as interested hobbyists, technical assistants, students, scientific staff, and advanced biology researchers use ImageJ on a daily basis, and exchange knowledge via its dedicated mailing list. Uses of ImageJ range from data visualization and teaching to advanced image processing and statistical analysis. The software{\textquoteright}s extensibility continues to attract biologists at all career stages as well as computer scientists who wish to effectively implement specific image-processing algorithms. In this review, we use the ImageJ project as a case study of how open-source software fosters its suites of software tools, making multitudes of image-analysis technology easily accessible to the scientific community. We specifically explore what makes ImageJ so popular, how it impacts the life sciences, how it inspires other projects, and how it is self-influenced by coevolving projects within the ImageJ ecosystem. Mol. Reprod. Dev. 2015. {\textcopyright} 2015 Wiley Periodicals, Inc.

}, issn = {1098-2795}, doi = {10.1002/mrd.22489}, author = {Schindelin, Johannes and Rueden, Curtis T and Hiner, Mark C and Eliceiri, Kevin W} } @article {1292, title = {Innovative Technological Approach to Ebola Virus Disease Outbreak Response in Nigeria Using the Open Data Kit and Form Hub Technology.}, journal = {PLoS One}, volume = {10}, year = {2015}, month = {2015}, pages = {e0131000}, abstract = {

The recent outbreak of Ebola Virus Disease (EVD) in West Africa has ravaged many lives. Effective containment of this outbreak relies on prompt and effective coordination and communication across various interventions, with early detection and response being critical to successful control. The use of information and communications technology (ICT) in active surveillance has proved to be effective, but its use in Ebola outbreak response has been limited. Due to the need for timeliness in reporting and communication for early discovery of new EVD cases and promptness in response, it became imperative to empower the response team members with technologies and solutions which would enable smooth and rapid data flow. The Open Data Kit and Form Hub technology were used in combination with the Dashboard technology and ArcGIS mapping for follow-up of contacts, identification of cases, case investigation and management, and also for strategic planning during the response. A remarkable improvement was recorded in the reporting of daily follow-up of contacts after the deployment of the integrated real-time technology. The turnaround time between identification of symptomatic contacts and evacuation to the isolation facility, and also for receipt of laboratory results, was reduced, and informed decisions could be taken by all concerned. Accountability in contact tracing was ensured by the use of a GPS-enabled device. The use of innovative technologies in the response to the EVD outbreak in Nigeria contributed significantly to the prompt control of the outbreak and containment of the disease by providing a valuable platform for early warning and guiding early actions.

}, keywords = {Disease Outbreaks, Ebolavirus, Epidemiological Monitoring, Female, Hemorrhagic Fever, Ebola, Humans, Male, Medical Informatics, Nigeria}, issn = {1932-6203}, doi = {10.1371/journal.pone.0131000}, author = {Tom-Aba, Daniel and Olaleye, Adeniyi and Olayinka, Adebola Tolulope and Nguku, Patrick and Waziri, Ndadilnasiya and Adewuyi, Peter and Adeoye, Olawunmi and Oladele, Saliu and Adeseye, Aderonke and Oguntimehin, Olukayode and Shuaib, Faisal} } @article {979, title = {medplot: a web application for dynamic summary and analysis of longitudinal medical data based on R.}, journal = {PLoS One}, volume = {10}, year = {2015}, month = {2015}, pages = {e0121760}, abstract = {

In biomedical studies, patients are often evaluated numerous times and a large number of variables are recorded at each time-point. Data entry and manipulation of longitudinal data can be performed using spreadsheet programs, which usually include some data plotting and analysis capabilities and are straightforward to use, but are not designed for the analyses of complex longitudinal data. Specialized statistical software offers more flexibility and capabilities, but first-time users with a biomedical background often find its use difficult. We developed medplot, an interactive web application that simplifies the exploration and analysis of longitudinal data. The application can be used to summarize, visualize and analyze data by researchers who are not familiar with statistical programs and whose knowledge of statistics is limited. The summary tools produce publication-ready tables and graphs. The analysis tools include features that are seldom available in spreadsheet software, such as correction for multiple testing, repeated measurement analyses and flexible non-linear modeling of the association of the numerical variables with the outcome. medplot is freely available and open source; it has an intuitive graphical user interface (GUI), is accessible via the Internet and can be used within a web browser, without the need to install and maintain programs locally on the user{\textquoteright}s computer. This paper describes the application and gives detailed examples describing how to use the application on real data from a clinical study including patients with early Lyme borreliosis.

}, issn = {1932-6203}, doi = {10.1371/journal.pone.0121760}, author = {Ahlin, {\v C}rt and Stupica, Da{\v s}a and Strle, Franc and Lusa, Lara} } @article {974, title = {Omics Metadata Management Software (OMMS).}, journal = {Bioinformation}, volume = {11}, year = {2015}, month = {2015}, pages = {165-72}, abstract = {Next-generation sequencing projects have underappreciated information management tasks requiring detailed attention to specimen curation, nucleic acid sample preparation and sequence production methods required for downstream data processing, comparison, interpretation, sharing and reuse. The few existing metadata management tools for genome-based studies provide weak curatorial frameworks for experimentalists to store and manage idiosyncratic, project-specific information, typically offering no automation supporting unified naming and numbering conventions for sequencing production environments that routinely deal with hundreds, if not thousands, of samples at a time. Moreover, existing tools are not readily interfaced with bioinformatics executables (e.g., BLAST, Bowtie2, custom pipelines). Our application, the Omics Metadata Management Software (OMMS), answers both needs, empowering experimentalists to generate intuitive, consistent metadata, and to perform analyses and information management tasks via an intuitive web-based interface. Several use cases with short-read sequence datasets are provided to validate installation and integrated function, and suggest possible methodological road maps for prospective users. Provided examples highlight possible OMMS workflows for metadata curation, multistep analyses, and results management and downloading. The OMMS can be implemented as a stand-alone package for individual laboratories, or can be configured for web-based deployment supporting geographically dispersed projects. The OMMS was developed using an open-source software base; it is flexible, extensible and easily installed and executed.

AVAILABILITY: The OMMS can be obtained at http://omms.sandia.gov.

}, issn = {0973-2063}, doi = {10.6026/97320630011165}, author = {Perez-Arriaga, Martha O and Wilson, Susan and Williams, Kelly P and Schoeniger, Joseph and Waymire, Russel L and Powell, Amy Jo} } @article {976, title = {Open Drug Discovery Toolkit (ODDT): a new open-source player in the drug discovery field.}, journal = {J Cheminform}, volume = {7}, year = {2015}, month = {2015}, pages = {26}, abstract = {

BACKGROUND: There has been huge progress in the open cheminformatics field in both methods and software development. Unfortunately, there has been little effort to unite those methods and software into one package. We here describe the Open Drug Discovery Toolkit (ODDT), which aims to fulfill the need for comprehensive and open source drug discovery software.

RESULTS: The Open Drug Discovery Toolkit was developed as a free and open source tool for both computer aided drug discovery (CADD) developers and researchers. ODDT reimplements many state-of-the-art methods, such as machine learning scoring functions (RF-Score and NNScore) and wraps other external software to ease the process of developing CADD pipelines. ODDT is an out-of-the-box solution designed to be easily customizable and extensible. Therefore, users are strongly encouraged to extend it and develop new methods. We here present three use cases for ODDT in common tasks in computer-aided drug discovery.

CONCLUSION: Open Drug Discovery Toolkit is released on a permissive 3-clause BSD license for both academic and industrial use. ODDT{\textquoteright}s source code, additional examples and documentation are available on GitHub (https://github.com/oddt/oddt).

}, issn = {1758-2946}, doi = {10.1186/s13321-015-0078-2}, author = {W{\'o}jcikowski, Maciej and Zielenkiewicz, Piotr and Siedlecki, Pawel} } @article {996, title = {Open source in cachexia?}, journal = {J Cachexia Sarcopenia Muscle}, volume = {6}, year = {2015}, month = {2015 Mar}, pages = {112-3}, issn = {2190-5991}, doi = {10.1002/jcsm.12013}, author = {Stemmler, Britta and Argil{\'e}s, Joseph M} } @article {995, title = {OpenHELP (Heidelberg laparoscopy phantom): development of an open-source surgical evaluation and training tool.}, journal = {Surg Endosc}, year = {2015}, month = {2015 Feb 12}, abstract = {

BACKGROUND: Apart from animal testing and clinical trials, surgical research and laparoscopic training mainly rely on phantoms. The aim of this project was to design a phantom with realistic anatomy and haptic characteristics, modular design and easy reproducibility. The phantom was named open-source Heidelberg laparoscopic phantom (OpenHELP) and serves as an open-source platform.

METHODS: The phantom was based on an anonymized CT scan of a male patient. The anatomical structures were segmented to obtain digital three-dimensional models of the torso and the organs. The digital models were materialized via rapid prototyping. Two methods of simulating pneumoperitoneum were developed: a flexible one, using an elastic abdominal wall, and a rigid one, using a plastic shell. Artificial organ production was carried out sequentially, starting from raw gypsum models to silicone molds to final silicone casts. As an example, the reproduction accuracy was evaluated for ten silicone rectum models by comparing the digital 3D surface of the original rectum with a CT scan and calculating the root mean square error of surface variations. Haptic realism was also evaluated to find the most realistic silicone compositions on a visual analog scale (VAS, 0-10).

RESULTS: The rigid and durable plastic torso and soft silicone organs of the abdominal cavity were successfully produced. A simulation of pneumoperitoneum could be created successfully by both methods. The reproduction accuracy of ten silicone rectum models showed an average root mean square error of 2.26 (0-11.48) mm. Haptic realism revealed an average value on a VAS of 7.25 (5.2-9.6) for the most realistic rectum.

CONCLUSION: The OpenHELP phantom proved to be feasible and accurate. The phantom has subsequently been applied frequently in the field of computer-assisted surgery at our institutions and is accessible to the academic community as an open-source project at www.open-cas.org.

}, issn = {1432-2218}, doi = {10.1007/s00464-015-4094-0}, author = {Kenngott, H G and W{\"u}nscher, J J and Wagner, M and Preukschas, A and Wekerle, A L and Neher, P and Suwelack, S and Speidel, S and Nickel, F and Oladokun, D and Maier-Hein, L and Dillmann, R and Meinzer, H P and M{\"u}ller-Stich, B P} } @article {1098, title = {Open-source health information technology: A case study of electronic medical records}, journal = {Health Policy and Technology}, volume = {4}, year = {2015}, pages = {14{\textendash}28}, author = {Safadi, Hani and Chan, David and Dawes, Martin and Roper, Mark and Faraj, Samer} } @article {1067, title = {Open-Source Radiation Exposure Extraction Engine (RE3) with Patient-Specific Outlier Detection.}, journal = {J Digit Imaging}, year = {2015}, month = {2015 Dec 7}, abstract = {

We present an open-source, picture archiving and communication system (PACS)-integrated radiation exposure extraction engine (RE3) that provides study-, series-, and slice-specific data for automated monitoring of computed tomography (CT) radiation exposure. RE3 was built using open-source components and seamlessly integrates with the PACS. RE3 calculations of dose length product (DLP) from the Digital Imaging and Communications in Medicine (DICOM) headers showed high agreement (R^2 = 0.99) with the vendor dose pages. For study-specific outlier detection, RE3 constructs robust, automatically updating multivariable regression models to predict DLP in the context of patient gender and age, scan length, water-equivalent diameter (D_w), and scanned body volume (SBV). As proof of concept, the model was trained on 811 CT chest, abdomen + pelvis (CAP) exams and 29 outliers were detected. The continuous variables used in the outlier detection model were scan length (R^2 = 0.45), D_w (R^2 = 0.70), SBV (R^2 = 0.80), and age (R^2 = 0.01). The categorical variables were gender (male average 1182.7 {\textpm} 26.3 and female 1047.1 {\textpm} 26.9~mGy~cm) and pediatric status (pediatric average 710.7 {\textpm} 73.6~mGy~cm and adult 1134.5 {\textpm} 19.3~mGy~cm).

}, issn = {1618-727X}, doi = {10.1007/s10278-015-9852-y}, author = {Weisenthal, Samuel J and Folio, Les and Kovacs, William and Seff, Ari and Derderian, Vana and Summers, Ronald M and Yao, Jianhua} } @article {999, title = {Role of OpenEHR as an open source solution for the regional modelling of patient data in obstetrics.}, journal = {J Biomed Inform}, volume = {55}, year = {2015}, month = {2015 Jun}, pages = {174-87}, abstract = {

This work investigates whether openEHR, with its reference model, archetypes and templates, is suitable for the digital representation of demographic as well as clinical data. Moreover, it elaborates openEHR as a tool for modelling Hospital Information Systems on a regional level based on a national logical infrastructure. OpenEHR is a dual-model approach developed for the modelling of Hospital Information Systems that enables semantic interoperability. A holistic solution to this is the use of dual-model-based Electronic Healthcare Record systems. Modelling data in the field of obstetrics is a challenge, since different regions demand locally specific information for the process of treatment. Smaller health units in developing countries like Brazil or Malaysia, which until recently handled automatable processes like the storage of sensitive patient data in paper form, are starting organizational reconstruction processes. This archetype proof-of-concept investigation has tried out some elements of the openEHR methodology in cooperation with a health unit in Colombo, Brazil. Two legal forms provided by the Brazilian Ministry of Health have been analyzed and classified into demographic and clinical data. The LinkEHR-Ed editor was used to read, edit and create archetypes. Results show that 33 clinical and demographic concepts, which are necessary to cover data demanded by the Unified National Health System, were identified. Of these concepts, 61\% were reused and 39\% were modified to cover domain requirements. The detailed process of reuse, modification and creation of archetypes is shown. We conclude that, although a major part of demographic and clinical patient data were already represented by existing archetypes, a significant part required major modifications. In this study openEHR proved to be a highly suitable tool for the modelling of complex health data. In combination with the LinkEHR-Ed software it offers user-friendly and highly applicable tools, although the complexity built up by the vast specifications requires expert networks to define generally accepted clinical models. Finally, this project has pointed out main benefits, including high coverage of obstetrics data on the Clinical Knowledge Manager, simple modelling, and a wide network and support using openEHR. Moreover, the barriers described include the allocation of clinical content to the respective archetypes, as well as slow adoption of changes on the Clinical Knowledge Manager, which leads to redundant efforts in data contribution; these barriers need to be addressed in future work.

}, issn = {1532-0480}, doi = {10.1016/j.jbi.2015.04.004}, author = {Pahl, Christina and Zare, Mojtaba and Nilashi, Mehrbakhsh and de Faria Borges, Marco Aur{\'e}lio and Weingaertner, Daniel and Detschew, Vesselin and Supriyanto, Eko and Ibrahim, Othman} } @article {1064, title = {Software to Facilitate Remote Sensing Data Access for Disease Early Warning Systems.}, journal = {Environ Model Softw}, volume = {74}, year = {2015}, month = {2015 Dec 1}, pages = {247-257}, abstract = {

Satellite remote sensing produces an abundance of environmental data that can be used in the study of human health. To support the development of early warning systems for mosquito-borne diseases, we developed an open-source, client-based software application to enable the Epidemiological Applications of Spatial Technologies (EASTWeb). Two major design decisions were full automation of the discovery, retrieval and processing of remote sensing data from multiple sources, and making the system easily modifiable in response to changes in data availability and user needs. Key innovations that helped to achieve these goals were the implementation of a software framework for data downloading and the design of a scheduler that tracks the complex dependencies among multiple data processing tasks and makes the system resilient to external errors. EASTWeb has been successfully applied to support forecasting of West Nile virus outbreaks in the United States and malaria epidemics in the Ethiopian highlands.

}, issn = {1364-8152}, doi = {10.1016/j.envsoft.2015.07.006}, author = {Liu, Yi and Hu, Jiameng and Snell-Feikema, Isaiah and VanBemmel, Michael S and Lamsal, Aashis and Wimberly, Michael C} } @article {980, title = {Virtualization of Open-Source Secure Web Services to Support Data Exchange in a Pediatric Critical Care Research Network.}, journal = {J Am Med Inform Assoc}, year = {2015}, month = {2015 Mar 21}, abstract = {

OBJECTIVES: To examine the feasibility of deploying a virtual web service for sharing data within a research network, and to evaluate the impact on data consistency and quality.

MATERIAL AND METHODS: Virtual machines (VMs) encapsulated an open-source, semantically and syntactically interoperable secure web service infrastructure along with a shadow database. The VMs were deployed to 8 Collaborative Pediatric Critical Care Research Network Clinical Centers.

RESULTS: Virtual web services could be deployed in hours. The interoperability of the web services reduced format misalignment from 56\% to 1\% and demonstrated that 99\% of the data consistently transferred using the data dictionary and 1\% needed human curation.

CONCLUSIONS: Use of virtualized open-source secure web service technology could enable direct electronic abstraction of data from hospital databases for research purposes.

}, issn = {1527-974X}, doi = {10.1093/jamia/ocv009}, author = {Frey, Lewis J and Sward, Katherine A and Newth, Christopher Jl and Khemani, Robinder G and Cryer, Martin E and Thelen, Julie L and Enriquez, Rene and Shaoyu, Su and Pollack, Murray M and Harrison, Rick E and Meert, Kathleen L and Berg, Robert A and Wessel, David L and Shanley, Thomas P and Dalton, Heidi and Carcillo, Joseph and Jenkins, Tammara L and Dean, J Michael} } @article {946, title = {An Albanian open source telemedicine platform.}, journal = {Telemed J E Health}, volume = {20}, year = {2014}, month = {2014 Jul}, pages = {673-7}, abstract = {

INTRODUCTION: The use of open source technologies to create collaboration platforms can produce huge advantages with small investment.

MATERIALS AND METHODS: We set up a telemedicine network for a healthcare district with typical centralization issues of developing countries. Our network was built using broadband Internet connection, and the digital divide in rural areas was reduced by means of wireless Internet connection. A software infrastructure was deployed on the network to implement the collaboration platform among different healthcare facilities.

RESULTS: We obtained an integrated platform with modest investment in hardware and operating systems and no costs for application software. Messaging, content management, information sharing, and videoconferencing are among the available services of the infrastructure. Furthermore, open source software is managed and continuously updated by active communities, making it possible to obtain systems similar to commercial ones in terms of quality and reliability.

CONCLUSIONS: As the use of free software in public administration is being widely promoted across the European Union, our experience may provide an example to implement similar infrastructures in the field of healthcare and welfare.

}, issn = {1556-3669}, doi = {10.1089/tmj.2013.0239}, author = {Zangara, Gianluca and Valentino, Francesca and Spinelli, Gaetano and Valenza, Mario and Marcheggiani, Angelo and Di Blasi, Francesco} } @article {932, title = {Automated Detection and Analysis of Ca(2+) Sparks in x-y Image Stacks Using a Thresholding Algorithm Implemented within the Open-Source Image Analysis Platform ImageJ.}, journal = {Biophys J}, volume = {106}, year = {2014}, month = {2014 Feb 4}, pages = {566-76}, abstract = {

Previous studies have used analysis of Ca(2+) sparks extensively to investigate both normal and pathological Ca(2+) regulation in cardiac myocytes. The great majority of these studies used line-scan confocal imaging. In part, this is because the development of open-source software for automatic detection of Ca(2+) sparks in line-scan images has greatly simplified data analysis. A disadvantage of line-scan imaging is that data are collected from a single row of pixels, representing only a small fraction of the cell, and in many instances x-y confocal imaging is preferable. However, the limited availability of software for Ca(2+) spark analysis in two-dimensional x-y image stacks presents an obstacle to its wider application. This study describes the development and characterization of software to enable automatic detection and analysis of Ca(2+) sparks within x-y image stacks, implemented as a plugin within the open-source image analysis platform ImageJ. The program includes methods to enable precise identification of cells within confocal fluorescence images, compensation for changes in background fluorescence, and options that allow exclusion of events based on spatial characteristics.

}, issn = {1542-0086}, doi = {10.1016/j.bpj.2013.12.040}, author = {Steele, Elliot M and Steele, Derek S} } @article {941, title = {Computational Pathology and Telepathology: SY05-1 TELECYTOLOGY TC AND TELEPATHOLOGY TP IN UNDERSERVED COUNTRIES TOY OR TOOL?}, journal = {Pathology}, volume = {46 Suppl 2}, year = {2014}, month = {2014 Oct}, pages = {S7}, abstract = {

BACKGROUND: Pathologic anatomic practice is an essential part of medical practice even in low-income countries. It is an extraordinarily helpful tool for reaching therapeutic decisions, monitoring therapeutic processes, and teaching students and residents to understand the aetiology and morphology of infectious and neoplastic diseases. However, all countries worldwide are suffering from a shortage of experienced surgical pathologists able to provide an adequate service to the clinics. Training pathologists is a time-consuming procedure that takes more than a decade. Therefore, new techniques have to be utilized to overcome the gap in human resources in the medical fields of these countries. Telepathology and telecytology may be effective tools to transfer both knowledge and experience to any place in the world with simple technical equipment using the Internet. We evaluate the benefit of TP and TC in three projects with different educational levels of the local pathologists.

MATERIAL AND APPROACH: In 2002, Cambodian pathologists started off with telepathology using the iPath Network as an open-source system. Up to now, more than 5600 cases have been submitted to experts. In 2007, a Tanzanian hospital started telepathology and solved more than 3600 cases. In 2010, an Afghan group of medical experts started using TP and submitted more than 1600 cases. The latest 100 cases of each group were analysed with respect to (1) the organs concerned; (2) the diagnostic techniques used, such as histology vs cytology; (3) complexity of the diagnostic question; (4) information quality and quantity; (5) diagnostic concordance between the primary diagnosis and the experts{\textquoteright} opinion; and (6) diagnostic accuracy of the experts{\textquoteright} diagnoses, in order to evaluate the benefit of the projects.

RESULTS AND CONCLUSION: Telepathology services range from a triage of diagnostic assessment of benign vs malignant to academic-scientific services including teaching and research facilities. Breast pathology is predominant in Afghanistan, whereas cervix pathology is the leading issue in Tanzania, and in Cambodia soft tissue/bone pathology plays the major role. Cytology was applied in more than 50\% of cases in Afghanistan, but in only 10\% and 5\% in Cambodia and Tanzania, respectively. The complexity of diagnostic questions differs, from Cambodia, which uses TP mostly for confirmation, to Tanzania and Afghanistan, which use TP mostly for primary diagnosis. Diagnostic concordance, which depends on both the experience of the local pathologists and the technical facilities of the laboratory, was highest in Cambodia, followed by Tanzania and Afghanistan. Diagnostic accuracy, which corresponds with the quantity and quality of the information, followed the same pattern. The success of TP projects can be measured by the number of TP sessions and by the sustainability of the projects. Our TP/TC projects in these countries with limited resources are effective tools for improving medical health care.

}, keywords = {iPath, open source software, telepathology}, issn = {1465-3931}, doi = {10.1097/01.PAT.0000454068.32934.09}, author = {Stauch, Gerhard and Miringa, Angelica and Raoufi, Rokai and Vathana, Cchut Serey and Hetzmann, Sophia and Hinsch, Nora and Dalquen, Peter and Voelker, Ullrich and Kunze, Dietmar} } @article {1044, title = {The development of an application for data privacy by applying an audit repository based on IHE ATNA.}, journal = {Stud Health Technol Inform}, volume = {198}, year = {2014}, month = {2014}, pages = {219-25}, abstract = {

It is necessary to optimize workflows and communication between the institutions involved in patients{\textquoteright} treatment to improve the quality and efficiency of German healthcare. To achieve this in the Metropolregion Rhein-Neckar, a personal, cross-institutional patient record (PEPA) is used. Given the immense sensitivity of the health-related information saved in the PEPA, it is imperative to obey the data protection regulations in Germany. One important aspect is the logging of access to personal health data and of all other safety-related events. For gathering audit information, the IHE profile ATNA can be used, because it provides a flexible and standardized infrastructure. Solutions for gathering audit information based on ATNA already exist. In this article, one solution (OpenATNA) is evaluated using the evaluation method defined by Peter Baumgartner. In addition, a user interface for a privacy officer is necessary to support the examination of the audit information. Therefore, we describe a method to develop an application in Liferay (an open-source enterprise portal project) that supports examination of the gathered audit information.

}, keywords = {Computer Security, Confidentiality, Data Curation, Databases, Factual, electronic health records, Guideline Adherence, Guidelines as Topic, Information Storage and Retrieval, Internationality, Medical Audit, Medical Record Linkage, Software, Software Design}, issn = {0926-9630}, author = {Bresser, Laura and K{\"o}hler, Steffen and Schwaab, Christoph} } @article {954, title = {Free/Libre open source software in health care: a review.}, journal = {Healthc Inform Res}, volume = {20}, year = {2014}, month = {2014 Jan}, pages = {11-22}, abstract = {

OBJECTIVES: To assess the current state of the art and the contribution of Free/Libre Open Source Software in health care (FLOSS-HC).

METHODS: The review is based on a narrative review of the scientific literature as well as sources in the context of FLOSS-HC available through the Internet. All relevant available sources have been integrated into the MedFLOSS database and are freely available to the community.

RESULTS: The literature review reveals that publications about FLOSS-HC are scarce. The largest part of the information about FLOSS-HC is available on dedicated websites and not in the academic literature. There are currently FLOSS alternatives available for nearly every specialty in health care. Maturity and quality vary considerably, and there is little information available on the percentage of systems that are actually used in health care delivery.

CONCLUSIONS: The global impact of FLOSS-HC is still very limited, and no figures on the penetration and usage of FLOSS-HC are available. However, there has been considerable growth in the last 5 to 10 years. While only a few systems were available a decade ago, many systems have since become available (e.g., more than 300 in the MedFLOSS database). While FLOSS concepts play an important role in most IT-related sectors (e.g., telecommunications, embedded devices), the healthcare industry is lagging behind this trend.

}, issn = {2093-3681}, doi = {10.4258/hir.2014.20.1.11}, author = {Karopka, Thomas and Schmuhl, Holger and Demski, Hans} } @article {1033, title = {Making cytological diagnoses on digital images using the iPath network.}, journal = {Acta Cytol}, volume = {58}, year = {2014}, month = {2014}, pages = {453-60}, abstract = {

BACKGROUND: The iPath telemedicine platform Basel is mainly used for histological and cytological consultations, but also serves as a valuable learning tool.

AIM: To study the level of accuracy in making diagnoses based on still images achieved by experienced cytopathologists, to identify limiting factors, and to provide a cytological image series as a learning set.

METHOD: Images from 167 consecutive cytological specimens of different origin were uploaded on the iPath platform and evaluated by four cytopathologists. Only wet-fixed and well-stained specimens were used. The consultants made specific diagnoses and categorized each as benign, suspicious or malignant.

RESULTS: For all consultants, specificity and sensitivity regarding categorized diagnoses were 83-92 and 85-93\%, respectively; the overall accuracy was 88-90\%. The interobserver agreement was substantial (κ = 0.791). The lowest rate of concordance was achieved in urine and bladder washings and in the identification of benign lesions.

CONCLUSION: Using a digital image set for diagnostic purposes implies that even under optimal conditions the accuracy rate will not exceed 80-90\%, mainly because of the lack of supportive immunocytochemical or molecular tests. This limitation does not disqualify digital images for teleconsulting or as a learning aid. The series of images used for the study is open to the public at http://pathorama.wordpress.com/extragenital-cytology-2013/.

}, keywords = {Adolescent, Adult, Aged, Aged, 80 and over, Child, Child, Preschool, Computers, Handheld, Cytodiagnosis, Diagnosis, Differential, Female, Humans, Hyperplasia, Infant, Male, Metaplasia, Middle Aged, Neoplasms, Observer Variation, Reproducibility of Results, Sensitivity and Specificity, Telemedicine}, issn = {0001-5547}, doi = {10.1159/000369241}, author = {Dalquen, Peter and Savic Prince, Spasenija and Spieler, Peter and Kunze, Dietmar and Neumann, Heinrich and Eppenberger-Castori, Serenella and Adams, Heiner and Glatz, Katharina and Bubendorf, Lukas} } @article {943, title = {Open-source electronic data capture system offered increased accuracy and cost-effectiveness compared with paper methods in Africa.}, journal = {J Clin Epidemiol}, year = {2014}, month = {2014 Aug 15}, abstract = {

OBJECTIVES: Existing electronic data capture options are often financially unfeasible in resource-poor settings or difficult to support technically in the field. To help facilitate large-scale multicenter studies in sub-Saharan Africa, the African Partnership for Chronic Disease Research (APCDR) has developed an open-source electronic questionnaire (EQ).

STUDY DESIGN AND SETTING: To assess its relative validity, we compared the EQ against traditional pen-and-paper methods using 200 randomized interviews conducted in an ongoing type 2 diabetes case-control study in South Africa.

RESULTS: During its 3-month validation, the EQ had a lower frequency of errors (EQ, 0.17 errors per 100 questions; paper, 0.73 errors per 100 questions; P-value <=0.001), and a lower monetary cost per correctly entered question, compared with the pen-and-paper method. We found no marked difference in the average duration of the interview between methods (EQ, 5.4~minutes; paper, 5.6~minutes).

CONCLUSION: This validation study suggests that the EQ may offer increased accuracy, similar interview duration, and increased cost-effectiveness compared with paper-based data collection methods. The APCDR EQ software is freely available (https://github.com/apcdr/questionnaire).

}, issn = {1878-5921}, doi = {10.1016/j.jclinepi.2014.06.012}, author = {Dillon, David G and Pirie, Fraser and Pomilla, Cristina and Sandhu, Manjinder S and Motala, Ayesha A and Young, Elizabeth H} } @article {1004, title = {An open-source, mobile-friendly search engine for public medical knowledge.}, journal = {Stud Health Technol Inform}, volume = {205}, year = {2014}, month = {2014}, pages = {358-62}, abstract = {

The World Wide Web has become an important source of information for medical practitioners. To complement the capabilities of currently available web search engines we developed FindMeEvidence, an open-source, mobile-friendly medical search engine. In a preliminary evaluation, the quality of results from FindMeEvidence proved to be competitive with those from TRIP Database, an established, closed-source search engine for evidence-based medicine.

}, keywords = {Computers, Handheld, Consumer Health Information, Data Mining, Evidence-Based Medicine, Information Dissemination, Internet, Search Engine, Software, Software Design, User-Computer Interface}, issn = {0926-9630}, author = {Samwald, Matthias and Hanbury, Allan} } @article {1027, title = {Possible combinations of electronic data capture and randomization systems. principles and the realization with RANDI2 and OpenClinica.}, journal = {Methods Inf Med}, volume = {53}, year = {2014}, month = {2014}, pages = {202-7}, abstract = {

BACKGROUND: Clinical trials (CT) are, in a wider sense, experiments to prove and establish the clinical benefit of treatments. Nowadays, electronic data capture systems (EDCS) are used more often, bringing better data management and higher data quality into clinical practice. Electronic systems for randomization are also used to assign patients to treatments.

OBJECTIVES: If both a randomization system (RS) and an EDCS are used, potentially identical data are collected in both, especially with stratified randomization. This separate data storage may lead to data inconsistency, and in general the data samples have to be aligned. This article discusses solutions for combining RS and EDCS; one approach is realized and introduced in detail.

METHODS: Different possible ways of combining EDCS and RS are determined, and the pros and cons of each solution are worked out. For the combination of two independent applications, the necessary communication interfaces are defined, taking existing standards into consideration. An example realization is implemented with the help of open-source applications and state-of-the-art software development procedures.

RESULTS: Three possibilities of separate usage or combination of EDCS and RS are presented and assessed: i) completely independent usage of both systems; ii) realization of one system with both functions; and iii) two separate systems that communicate via defined interfaces. In addition, a realization of our preferred approach, the combination of both systems, is introduced using the open source tools RANDI2 and OpenClinica.

CONCLUSION: The advantage of flexible, independent development of EDCS and RS is shown, based on the fact that these tools have very different features. In our opinion, the combination of both systems via defined interfaces fulfills the requirements of randomization and electronic data capture and is feasible in practice. In addition, the use of such a setting can reduce training costs and error-prone duplicate data entry.

}, keywords = {Automatic Data Processing, Computer Communication Networks, Humans, Medical Informatics Computing, Medical Records Systems, Computerized, Random Allocation, Randomized Controlled Trials as Topic, Software Design}, issn = {0026-1270}, doi = {10.3414/ME13-01-0074}, author = {Schrimpf, D and Haag, M and Pilz, L R} } @article {1026, title = {Simplifying electronic data capture in clinical trials: workflow embedded image and biosignal file integration and analysis via web services.}, journal = {J Digit Imaging}, volume = {27}, year = {2014}, month = {2014 Oct}, pages = {571-80}, abstract = {

To improve data quality and save cost, clinical trials are nowadays performed using electronic data capture systems (EDCS) providing electronic case report forms (eCRF) instead of paper-based CRFs. However, such EDCS are insufficiently integrated into the medical workflow and lack interfaces to other study-related systems. In addition, most EDCS are unable to handle image and biosignal data, although electrocardiography (ECG, as an example of one-dimensional (1D) data), ultrasound (2D data), or magnetic resonance imaging (3D data) have been established as surrogate endpoints in clinical trials. In this paper, an integrated workflow based on OpenClinica, one of the world{\textquoteright}s largest EDCS, is presented. Our approach consists of three components for (i) sharing of study metadata, (ii) integration of large-volume data into eCRFs, and (iii) automatic image and biosignal analysis. In all components, metadata is transferred between systems using web services and JavaScript, and binary large objects (BLOBs) are sent via the secure file transfer protocol and hypertext transfer protocol. We applied the closed-loop workflow in a multicenter study in which long-term (7~days/24~h) Holter ECG monitoring is acquired from subjects with diabetes. Study metadata is automatically transferred into OpenClinica, the 4~GB BLOBs are seamlessly integrated into the eCRF and automatically processed, and the results of signal analysis are written back into the eCRF immediately.

}, keywords = {Algorithms, Automatic Data Processing, Clinical Trials as Topic, Database Management Systems, Humans, Image Processing, Computer-Assisted, Information Storage and Retrieval, Internet, Medical Records Systems, Computerized, Systems Integration, Workflow}, issn = {1618-727X}, doi = {10.1007/s10278-014-9694-z}, author = {Haak, Daniel and Samsel, Christian and Gehlen, Johan and Jonas, Stephan and Deserno, Thomas M} } @article {949, title = {Surgical planning of Isshiki type I thyroplasty using an open-source Digital Imaging and Communication in Medicine viewer OsiriX.}, journal = {Acta Otolaryngol}, volume = {134}, year = {2014}, month = {2014 Jun}, pages = {620-5}, abstract = {

CONCLUSION: Preoperative surgical planning of Isshiki type I thyroplasty with the Digital Imaging and Communication in Medicine (DICOM) viewer OsiriX can be used for strategic and predictable type I thyroplasty.

OBJECTIVES: The aim of this study was to determine the efficacy of the preoperative planning of type I thyroplasty using the DICOM viewer OsiriX.

METHODS: Five patients with unilateral vocal cord paralysis and severe breathy dysphonia were included in this study. Multidetector computed tomography (CT) DICOM data were obtained and breath holding was performed during image acquisition. Using three-dimensional multiplanar reconstruction, we outlined the optimal location for a window. Type I thyroplasty was performed using Isshiki{\textquoteright}s original method, and only the placement of the window was decided according to the preoperative simulation point. To verify the advantages of this method, we compared our data with previous data from operations performed without DICOM viewer planning, in terms of total operation time and voice quality at 3 months after the operation.

RESULTS: All patients are free from dysphonia and their glottic closures have resolved satisfactorily. Postoperative CT revealed that appropriate implant positioning resulted in successful surgical intervention. The comparison of total operation time showed that with the new method, operation duration was shortened by 12 min.

}, issn = {1651-2251}, doi = {10.3109/00016489.2014.880796}, author = {Hara, Hirotaka and Hori, Takeshi and Sugahara, Kazuma and Yamashita, Hiroshi} } @article {1001, title = {tranSMART: An Open Source Knowledge Management and High Content Data Analytics Platform.}, journal = {AMIA Jt Summits Transl Sci Proc}, volume = {2014}, year = {2014}, month = {2014}, pages = {96-101}, abstract = {

The tranSMART knowledge management and high-content analysis platform is a flexible software framework featuring novel research capabilities. It enables analysis of integrated data for the purposes of hypothesis generation, hypothesis validation, and cohort discovery in translational research. tranSMART bridges the prolific world of basic science and clinical practice data at the point of care by merging multiple types of data from disparate sources into a common environment. The application supports data harmonization and integration with analytical pipelines. The application code was released into the open source community in January 2012, with 32 instances in operation. tranSMART{\textquoteright}s extensible data model and corresponding data integration processes, rapid data analysis features, and open source nature make it an indispensable tool in translational or clinical research.

}, issn = {2153-4063}, author = {Scheufele, Elisabeth and Aronzon, Dina and Coopersmith, Robert and McDuffie, Michael T and Kapoor, Manish and Uhrich, Christopher A and Avitabile, Jean E and Liu, Jinlei and Housman, Dan and Palchuk, Matvey B} } @article {948, title = {Utilization of open source electronic health record around the world: A systematic review.}, journal = {J Res Med Sci}, volume = {19}, year = {2014}, month = {2014 Jan}, pages = {57-64}, abstract = {

Many projects on developing Electronic Health Record (EHR) systems have been carried out in many countries. The current study was conducted to review the published data on the utilization of open source EHR systems in different countries all over the world. Using free-text and keyword search techniques, six bibliographic databases were searched for related articles. The identified papers were screened and reviewed in a series of stages for relevance and validity. The findings showed that open source EHRs have been widely used in resource-limited regions on all continents, especially in Sub-Saharan Africa and South America. This creates opportunities to improve the national healthcare level, especially in developing countries with minimal financial resources. Open source technology is a solution to overcome the problems of high cost and inflexibility associated with proprietary health information systems.

}, issn = {1735-1995}, author = {Aminpour, Farzaneh and Sadoughi, Farahnaz and Ahamdi, Maryam} } @article {1009, title = {AccessMRS: integrating OpenMRS with smart forms on Android.}, journal = {Stud Health Technol Inform}, volume = {192}, year = {2013}, month = {2013}, pages = {866-70}, abstract = {

We present a new open-source Android application, AccessMRS, for interfacing with an electronic medical record system (OpenMRS) and loading {\textquoteright}Smart Forms{\textquoteright} on a mobile device. AccessMRS functions as a patient-centered interface for viewing OpenMRS data; managing patient information in reminders, task lists, and previous encounters; and launching patient-specific {\textquoteright}Smart Forms{\textquoteright} for electronic data collection and dissemination of health information. We present AccessMRS in the context of related software applications we developed to serve Community Health Workers, including AccessInfo, AccessAdmin, AccessMaps, and AccessForms. The specific features and design of AccessMRS are detailed in relationship to the requirements that drove development: the workflows of the Kenyan Ministry of Health Community Health Volunteers (CHVs) supported by the AMPATH Primary Health Care Program. Specifically, AccessMRS was designed to improve the quality of community-based Maternal and Child Health services delivered by CHVs in Kosirai Division. AccessMRS is currently in use by more than 80 CHVs in Kenya and undergoing formal assessment of acceptability, effectiveness, and cost.

}, keywords = {electronic health records, Forms and Records Control, Information Storage and Retrieval, Medical Record Linkage, Mobile Applications, Programming Languages, Software Design, Systems Integration, User-Computer Interface}, issn = {0926-9630}, author = {Fazen, Louis E and Chemwolo, Benjamin T and Songok, Julia J and Ruhl, Laura J and Kipkoech, Carolyne and Green, James M and Ikemeri, Justus E and Christoffersen-Deb, Astrid} } @article {848, title = {A common type system for clinical natural language processing.}, journal = {J Biomed Semantics}, volume = {4}, year = {2013}, month = {2013}, pages = {1}, abstract = {


BACKGROUND: One challenge in reusing clinical data stored in electronic medical records is that these data are heterogeneous. Clinical Natural Language Processing (NLP) plays an important role in transforming information in clinical text to a standard representation that is comparable and interoperable. Information may be processed and shared when a type system specifies the allowable data structures. Therefore, we aim to define a common type system for clinical NLP that enables interoperability between structured and unstructured data generated in different clinical settings.

RESULTS: We describe a common type system for clinical NLP that has an end target of deep semantics based on Clinical Element Models (CEMs), thus interoperating with structured data and accommodating diverse NLP approaches. The type system has been implemented in UIMA (Unstructured Information Management Architecture) and is fully functional in a popular open-source clinical NLP system, cTAKES (clinical Text Analysis and Knowledge Extraction System) versions 2.0 and later.

CONCLUSIONS: We have created a type system that targets deep semantics, thereby allowing NLP systems to encapsulate knowledge from text and share it alongside heterogeneous clinical data sources. Rather than the surface semantics that are typically the end product of NLP algorithms, CEM-based semantics explicitly build in deep clinical semantics as the point of interoperability with more structured data types.

}, issn = {2041-1480}, doi = {10.1186/2041-1480-4-1}, author = {Wu, Stephen T and Kaggal, Vinod C and Dligach, Dmitriy and Masanz, James J and Chen, Pei and Becker, Lee and Chapman, Wendy W and Savova, Guergana K and Liu, Hongfang and Chute, Christopher G} } @inbook {1091, title = {Designing Sustainable Open Source Systems: The Cuban National Health Care Network and Portal (INFOMED)}, booktitle = {Grand Successes and Failures in IT. Public and Private Sectors}, series = {IFIP Advances in Information and Communication Technology}, volume = {402}, year = {2013}, pages = {451-466}, publisher = {Springer Berlin Heidelberg}, organization = {Springer Berlin Heidelberg}, keywords = {Cuba, INFOMED, national health care systems, open source software, qualitative research methods, virtual infrastructures}, isbn = {978-3-642-38861-3}, doi = {10.1007/978-3-642-38862-0_28}, url = {http://dx.doi.org/10.1007/978-3-642-38862-0_28}, author = {S{\'e}ror, Ann}, editor = {Dwivedi, Yogesh K. and Henriksen, Helle Zinner and Wastell, David and De{\textquoteright}, Rahul} } @article {863, title = {DICOM RIS/PACS Telemedicine Network Implementation using Free Open Source Software}, journal = {Latin America Transactions, IEEE (Revista IEEE America Latina)}, volume = {11}, year = {2013}, pages = {168-171}, abstract = {The increasing incorporation of modern medical imaging equipment requires the operation of systems that store, transmit and display images, known as PACS (Picture Archiving and Communication Systems), via digital networks in order to provide health services of higher quality. These systems have revolutionized radiology, basing it on digital techniques, communications, visualization and information technologies. One of the major problems that arises in institutions working with Imaging Diagnosis (Dx), in the interconnection of networks or equipment, is the "bottleneck" caused by the high flow of network data. This flux is given by the transfer of images in the DICOM (Digital Imaging and Communications in Medicine) format to the various devices that make up a DICOM network, called Service Class Users (SCU) and Service Class Providers (SCP). This paper describes the development and implementation of a medical image network using free software and its compatibility with the existing data network of a RIS (Radiology Information System). It also describes the techniques used for the interconnection of local nodes on a LAN (Local Area Network) and of remote nodes on a WAN (Wide Area Network) via the Internet and a WLAN (Wireless Local Area Network).}, keywords = {DICOM, Free Software, Medical Imaging, Telemedicine}, issn = {1548-0992}, doi = {10.1109/TLA.2013.6502797}, author = {Alvarez, Luis R. and Vargas Solis, Roberto} } @article {1010, title = {E-health systems for management of MDR-TB in resource-poor environments: a decade of experience and recommendations for future work.}, journal = {Stud Health Technol Inform}, volume = {192}, year = {2013}, month = {2013}, pages = {627-31}, abstract = {

INTRODUCTION: Multi-drug resistant TB (MDR-TB) is a complex infectious disease that is a growing threat to global health. It requires lengthy treatment with multiple drugs and specialized laboratory testing. Effectively scaling up treatment to thousands of patients requires good information systems to support clinical care, reporting, drug forecasting, supply chain management and monitoring.

METHODS: Over the last decade we have developed the PIH-EMR electronic medical record system, and subsequently OpenMRS-TB, to support the treatment of MDR-TB in Peru, Haiti, Pakistan, and other resource-poor environments.

RESULTS: We describe here the experience with implementing these systems and evaluating many aspects of their performance, and review other systems for MDR-TB management.

CONCLUSIONS: We recommend a new approach to information systems to address the barriers to scaling up MDR-TB treatment, particularly access to the appropriate drugs and lab data. We propose moving away from fragmented, vertical systems and towards common platforms that address all stages of TB care, support open data standards and interoperability, cover a wide range of diseases including HIV, integrate with mHealth applications, and function in resource-poor environments.

}, keywords = {Developing Countries, electronic health records, Electronic Prescribing, Extensively Drug-Resistant Tuberculosis, Haiti, Health Information Management, Humans, Medication Systems, Hospital, Pakistan, Remote Consultation}, issn = {0926-9630}, author = {Fraser, Hamish S F and Habib, Ali and Goodrich, Mark and Thomas, David and Blaya, Joaquin A and Fils-Aime, Joseph Reginald and Jazayeri, Darius and Seaton, Michael and Khan, Aamir J and Choi, Sharon S and Kerrison, Foster and Falzon, Dennis and Becerra, Mercedes C} } @article {891, title = {Fundus image diagnostic agreement in uveitis utilizing free and open source software.}, journal = {Can J Ophthalmol}, volume = {48}, year = {2013}, month = {2013 Aug}, pages = {227-34}, abstract = {

OBJECTIVE: To assess the adequacy of image agreement regarding uveitis based on color fundus and fluorescein angiography images alone, and to use free and open source applications to conduct an image agreement study.

DESIGN: Cross-sectional agreement study.

PARTICIPANTS: Baseline fundus and fluorescein images of patients with panuveitis, posterior, or intermediate uveitis enrolled in the Multi-center Uveitis Steroid Treatment (MUST) trial.

METHODS: Three fellowship-trained specialists in uveitis independently reviewed patient images using ClearCanvas{\texttrademark} and responded using Epi Info{\texttrademark}. The diagnoses of the 3 reviewers were compared with the MUST clinician as a gold standard. A rank transformation adjusted for the possible variation in number of responses per patient. Chance-corrected interobserver agreement among the 3 reviewers was estimated with the ι coefficient. Confidence interval (CI) and SE were bootstrapped.

RESULTS: Agreement between the diagnoses of the respondents and the baseline MUST clinician{\textquoteright}s diagnosis was poor across all diagnostic categories, ι = 0.09 (95\% CI, 0.07-0.11). The agreement among respondents alone also was poor, ι = 0.11 {\textpm} 0.02 (95\% CI, 0.08-0.13). The specialists requested more patient historical and clinical information to make a diagnosis on all patients.

CONCLUSIONS: The ability to distinguish among the multiple conditions of uveitis appears to be limited when diagnosis is based on fundus imaging alone. Future studies should investigate different categories of clinical data to supplement image data. Freely available applications have excellent utility in ophthalmic imaging agreement studies.

}, issn = {0008-4182}, doi = {10.1016/j.jcjo.2013.02.010}, author = {Hsieh, Jane and Honda, Andrea F and Su{\'a}rez-Fari{\~n}as, Mayte and Samson, C Michael and Kedhar, Sanjay and Mauro, John and Francis, Jasmine and Badamo, Jason and Diaz, Vicente A and Kempen, John H and Latkany, Paul A} } @article {855, title = {Health care transformation through collaboration on open-source informatics projects: integrating a medical applications platform, research data repository, and patient summarization.}, journal = {Interact J Med Res}, volume = {2}, year = {2013}, month = {2013}, pages = {e11}, abstract = {

BACKGROUND: The Strategic Health IT Advanced Research Projects (SHARP) program seeks to conquer well-understood challenges in medical informatics through breakthrough research. Two SHARP centers have found alignment in their methodological needs: (1) members of the National Center for Cognitive Informatics and Decision-making (NCCD) have developed knowledge bases to support problem-oriented summarizations of patient data, and (2) Substitutable Medical Apps, Reusable Technologies (SMART) has built a platform for reusable medical apps that can run on participating systems connected to various electronic health records (EHRs). Combining the work of these two centers will ensure wide dissemination of new methods for synthesized views of patient data. Informatics for Integrating Biology and the Bedside (i2b2) is an NIH-funded clinical research data repository platform in use at over 100 sites worldwide. By also working with a co-occurring initiative to SMART-enable i2b2, we can confidently write one app that can be used extremely broadly.

OBJECTIVE: Our goal was to facilitate development of intuitive, problem-oriented views of the patient record using NCCD knowledge bases that would run in any EHR. To do this, we developed a collaboration between the two SHARPs and an NIH center, i2b2.

METHODS: First, we implemented collaborative tools to connect researchers at three institutions. Next, we developed a patient summarization app using the SMART platform and a previously validated NCCD problem-medication linkage knowledge base derived from the National Drug File-Reference Terminology (NDF-RT). Finally, to SMART-enable i2b2, we implemented two new Web service "cells" that expose the SMART application programming interface (API), and we made changes to the Web interface of i2b2 to host a "carousel" of SMART apps.

RESULTS: We deployed our SMART-based, NDF-RT-derived patient summarization app in this SMART-i2b2 container. It displays a problem-oriented view of medications and presents a line-graph display of laboratory results.

CONCLUSIONS: This summarization app can be run in any EHR environment that either supports SMART or runs SMART-enabled i2b2. This i2b2 "clinical bridge" demonstrates a pathway for reusable app development that does not require EHR vendors to immediately adopt the SMART API. Apps can be developed in SMART and run by clinicians in the i2b2 repository, reusing clinical data extracted from EHRs. This may encourage the adoption of SMART by supporting SMART app development until EHRs adopt the platform. It also allows a new variety of clinical SMART apps, fueled by the broad aggregation of data types available in research repositories. The app (including its knowledge base) and SMART-i2b2 are open-source and freely available for download.

}, issn = {1929-073X}, doi = {10.2196/ijmr.2454}, author = {Klann, Jeffrey G and McCoy, Allison B and Wright, Adam and Wattanasin, Nich and Sittig, Dean F and Murphy, Shawn N} } @article {1035, title = {A hybrid approach to telepathology in Cambodia.}, journal = {J Telemed Telecare}, volume = {19}, year = {2013}, month = {2013 Dec}, pages = {475-8}, abstract = {

We established a hybrid telepathology network at the Children{\textquoteright}s Surgical Centre (CSC) in Cambodia, based on store-and-forward communication using iPATH and videoconferencing using Skype. We retrospectively analysed all data from the CSC stored on the iPATH server and reviewed the patient notes over an 8-month period. Of 115 patients for histopathology diagnosis during the study period, 38 cases were uploaded onto iPATH for further telemedicine discussion. The median time for a specialist other than the local one to comment on a case on iPATH was 5 days (range 0-15). In three cases (8\%) there was no reply from a specialist on iPATH. During the study period, seven clinical conferences were held, with an average of 6 cases (range 4-7) discussed at each conference. All 38 cases discussed had a final agreed diagnosis, and firm management plans were made. Of the 24 cases where proactive management was advised, 17 patients followed through with the recommendations. Although the combination of video consultations and store-and-forward communication has not been used much before in the developing world, it has benefited patient care and outcomes at the CSC.

}, keywords = {Adolescent, Adult, Aged, Aged, 80 and over, Cambodia, Child, Child, Preschool, Female, Humans, Internet, Male, Middle Aged, Remote Consultation, Retrospective Studies, Surgical Procedures, Operative, telepathology, Videoconferencing, Young Adult}, issn = {1758-1109}, doi = {10.1177/1357633X13512071}, author = {Kadaba, Varsha and Ly, Tho and Noor, Saqib and Chhut, Serey V and Hinsch, Nora and Stauch, Gerhard and Gollogly, James} } @article {1011, title = {Informatics lessons from using a novel immunization information system.}, journal = {Stud Health Technol Inform}, volume = {192}, year = {2013}, month = {2013}, pages = {589-93}, abstract = {

In the 1990s, NewYork-Presbyterian Hospital began developing a comprehensive, standards-based immunization information system. The system, known as EzVac, has been operational since 1998 and now includes information on 2 million immunizations administered to more than 260,000 individuals. The system exchanges data with multiple electronic health records, a public health immunization registry, and a standalone personal health record. EzVac modules have recently been incorporated into the OpenMRS application and are being used to enhance immunization efforts in developing nations. We report on our experience with developing and using the EzVac system for 1) clinical care, both in local and global settings, 2) public health reporting, 3) consumer engagement, and 4) clinical and informatics research. We emphasize the advantages of using health IT standards to facilitate immunization information exchange in each of these domains.

}, keywords = {electronic health records, Health Information Management, Immunization, Internet, Medical Record Linkage, New York, Registries, Software}, issn = {0926-9630}, author = {Vawdrey, David K and Natarajan, Karthik and Kanter, Andrew S and Hripcsak, George and Kuperman, Gilad J and Stockwell, Melissa S} } @article {864, title = {Integration of PACS and CAD systems using DICOMDIR and open-source tools}, year = {2013}, pages = {86740V-86740V-6}, abstract = {The advancements of the last 30 years have made picture archiving and communication system (PACS) an indispensable technology to improve the delivery and management of clinical imaging services. Similarly, the maturation of algorithms and computer aided detection (CAD) systems has enhanced the interpretation and diagnosis of radiographical images. However, the lack of integration between the two systems inhibits the rate of development and application of these recent innovations in reaching the clinical users of PACS. We aim to enhance the clinical efficiency of CAD systems by developing an accessible, fully automated, user-friendly, and integrated linkage of CAD and PACS systems. This is the first integration initiative to take advantage of DICOMDIR file and its ability to index DICOM files, allowing images outside of PACS to be viewed within PACS. In this demonstration, the CAD system evaluates CT chest exams to detect lesions in the ribs and produces whole rib map images, screenshots, and detection report. A script executes the rib CAD system and creates a DICOMDIR file using {\quotedblbase}DCMTK", an open-source DICOM toolkit. We evaluated our system on thirty 5mm slice thickness and thirty 2mm slice thickness image studies and demonstrated a time saving efficiency of 93s{\textpm}14s and 221s{\textpm}17s per exam, respectively, compared to the current non-integrated workflow of using CAD systems. The advantages of this system are that it is easy to implement, requires no additional workstation and training, and allows CAD results to be viewed in PACS without disrupting radiology workflow, while maintaining the independence of both technologies. {\textcopyright} (2013) COPYRIGHT Society of Photo-Optical Instrumentation Engineers (SPIE).}, doi = {10.1117/12.2001560}, url = {+ http://dx.doi.org/10.1117/12.2001560}, author = {Huang, Jiaxin and Ling, Alexander and Summers, Ronald M. and Yao, Jianhua} } @article {852, title = {The Medical Imaging Interaction Toolkit: challenges and advances : 10 years of open-source development.}, journal = {Int J Comput Assist Radiol Surg}, year = {2013}, month = {2013 Apr 16}, abstract = {

PURPOSE: The Medical Imaging Interaction Toolkit (MITK) has been available as open-source software for almost 10 years now. In this period the requirements of software systems in the medical image processing domain have become increasingly complex. The aim of this paper is to show how MITK evolved into a software system that is able to cover all steps of a clinical workflow including data retrieval, image analysis, diagnosis, treatment planning, intervention support, and treatment control. METHODS: MITK provides modularization and extensibility on different levels. In addition to the original toolkit, a module system, micro services for small, system-wide features, a service-oriented architecture based on the Open Services Gateway initiative (OSGi) standard, and an extensible and configurable application framework allow MITK to be used, extended and deployed as needed. A refined software process was implemented to deliver high-quality software, ease the fulfillment of regulatory requirements, and enable teamwork in mixed-competence teams. RESULTS: MITK has been applied by a worldwide community and integrated into a variety of solutions, either at the toolkit level or as an application framework with custom extensions. The MITK Workbench has been released as a highly extensible and customizable end-user application. Optional support for tool tracking, image-guided therapy, diffusion imaging as well as various external packages (e.g. CTK, DCMTK, OpenCV, SOFA, Python) is available. MITK has also been used in several FDA/CE-certified applications, which demonstrates the high-quality software and rigorous development process. CONCLUSIONS: MITK provides a versatile platform with a high degree of modularization and interoperability and is well suited to meet the challenging tasks of today{\textquoteright}s and tomorrow{\textquoteright}s clinically motivated research.

}, issn = {1861-6429}, doi = {10.1007/s11548-013-0840-8}, author = {Nolden, Marco and Zelzer, Sascha and Seitel, Alexander and Wald, Diana and M{\"u}ller, Michael and Franz, Alfred M and Maleike, Daniel and Fangerau, Markus and Baumhauer, Matthias and Maier-Hein, Lena and Maier-Hein, Klaus H and Meinzer, Hans -Peter and Wolf, Ivo} } @inbook {866, title = {MITK-US: Echtzeitverarbeitung von Ultraschallbildern in MITK}, booktitle = {Bildverarbeitung f{\"u}r die Medizin 2013}, series = {Informatik aktuell}, year = {2013}, pages = {302-307}, publisher = {Springer Berlin Heidelberg}, organization = {Springer Berlin Heidelberg}, abstract = {Ultraschall (US) als bildgebendes Verfahren in der Medizin ist nicht invasiv, schnell, vielerorts verf{\"u}gbar, kommt ohne Strahlenbelastung aus und liefert kontinuierlich Daten in Echtzeit. Die Nutzung von US f{\"u}r computerassistierte Interventionen (CAI) stellt jedoch nicht nur extrem hohe Anforderungen an die Methoden zur Bildverarbeitung aufgrund der beschr{\"a}nkten Bildqualit{\"a}t, sondern bedeutet auch einen betr{\"a}chtlichen Integrationsaufwand wenn die Daten in Echtzeit weiterverarbeitet werden sollen. Mit MITK-US stellen wir in dieser Arbeit ein neues Modul f{\"u}r das Open Source verf{\"u}gbare Medical Imaging Interaction Toolkit (MITK) vor, welches die einheitliche Einbindung und Weiterverarbeitung von Echtzeitultraschalldaten erm{\"o}glicht und somit den Aufwand f{\"u}r die Integration von US in CAI Systeme verringert. Da die Verwendung von Echtzeitdaten insbesondere im Bereich der CAI zahlreiche neue M{\"o}glichkeiten bietet, erwarten wir einen hohen Nutzen dieses Moduls f{\"u}r k{\"u}nftige Projekte.}, isbn = {978-3-642-36479-2}, doi = {10.1007/978-3-642-36480-8_53}, url = {http://dx.doi.org/10.1007/978-3-642-36480-8_53}, author = {Franz, AlfredMichael and M{\"a}rz, Keno and Seitel, Alexander and M{\"u}ller, Michael and Zelzer, Sascha and Nodeln, Marco and Meinzer, Hans-Peter and Maier-Hein, Lena}, editor = {Meinzer, Hans-Peter and Deserno, Thomas Martin and Handels, Heinz and Tolxdorff, Thomas} } @article {894, title = {Novel open-source electronic medical records system for palliative care in low-resource settings.}, journal = {BMC Palliat Care}, volume = {12}, year = {2013}, month = {2013}, pages = {31}, abstract = {

BACKGROUND: The need for palliative care in sub-Saharan Africa is staggering: this region shoulders over 67\% of the global burden of HIV/AIDS and cancer. However, provisions for these essential services remain limited and poorly integrated with national health systems in most nations. Moreover, the evidence base for palliative care in the region remains scarce. This study chronicles the development and evaluation of DataPall, an open-source electronic medical records system that can be used to track patients, manage data, and generate reports for palliative care providers in these settings. DataPall was developed using design criteria encompassing both functional and technical objectives articulated by hospital leaders and palliative care staff at a leading palliative care center in Malawi. The database can be used with computers that run Windows XP SP 2 or newer, and does not require an internet connection for use. Subsequent to its development and implementation in two hospitals, DataPall was tested among both trained and untrained hospital staff for its usability in comparison to existing paper records systems, as well as for the speed at which users could perform basic database functions. Additionally, all participants evaluated this program on a standard system usability scale.

RESULTS: In a study of health professionals in a Malawian hospital, DataPall enabled palliative care providers to find patients{\textquoteright} appointments, on average, in less than half the time required to locate the same record in current paper records. Moreover, participants generated customizable reports documenting patient records and comprehensive reports on providers{\textquoteright} activities with little training necessary. Participants affirmed this ease of use on the system usability scale.

CONCLUSIONS: DataPall is a simple, effective electronic medical records system that can assist in developing an evidence base of clinical data for palliative care in low-resource settings. The system is available at no cost, is specifically designed to chronicle care in the region, and is tailored to meet the technical needs and user specifications of such facilities.

}, issn = {1472-684X}, doi = {10.1186/1472-684X-12-31}, author = {Shah, Kamal G and Slough, Tara Lyn and Yeh, Ping Teresa and Gombwa, Suave and Kiromera, Athanase and Oden, Z Maria and Richards-Kortum, Rebecca R} } @article {875, title = {Three-dimensional volumetric rendition of cannon ball pulmonary metastases by the use of 3D Slicer, an open source free software package.}, journal = {BMJ Case Rep}, volume = {2013}, year = {2013}, month = {2013}, issn = {1757-790X}, doi = {10.1136/bcr-2012-008248}, author = {Revannasiddaiah, Swaroop and Susheela, Sridhar Papaiah and Madhusudhan, N and Mallarajapatna, Govindarajan J} } @article {887, title = {Use of Open Source Software in Health Care Delivery - Results of a Qualitative Field Study. Contribution of the EFMI LIFOSS Working Group.}, journal = {Yearb Med Inform}, volume = {8}, year = {2013}, month = {2013}, pages = {107-13}, abstract = {

OBJECTIVES: To assess and analyze the attitude of health IT executives towards the utilization of specialized medical Open Source software (OSS) in Germany{\textquoteright}s and other European countries{\textquoteright} health care delivery.

METHODS: After an initial literature review a field study was carried out based on semi-structured expert interviews. Eight German and 11 other European health IT executives were surveyed. The results were qualitatively analyzed using the grounded theory approach. Identified concepts were reviewed using SWOT analysis.

RESULTS: In total, 13 strengths, 11 weaknesses, 3 opportunities, and 8 threats of the utilization of OSS in a clinical setting could be identified. Additionally, closely related aspects such as general software procurement criteria, the overall attitude of health IT executives, users, and management towards OSS, and its current and future use could also be assessed.

CONCLUSIONS: Medical OSS is rarely used in health care delivery. In order to capitalize on the unique advantages of OSS in a clinical setting, complex requirements need to be addressed. The shortcomings of OSS present an attractive breeding ground for new commercial offerings and services that have yet to emerge.

}, issn = {0943-4747}, author = {Schmuhl, H and Heinze, O and Bergh, B} } @article {896, title = {Utility of an open-source DICOM viewer software (OsiriX) to assess pulmonary fibrosis in systemic sclerosis: preliminary results.}, journal = {Rheumatol Int}, year = {2013}, month = {2013 Aug 15}, abstract = {

To investigate the utility of an open-source Digital Imaging and Communications in Medicine viewer software, OsiriX, to assess pulmonary fibrosis (PF) in patients with systemic sclerosis (SSc). Chest high-resolution computed tomography (HRCT) examinations obtained from 10 patients with a diagnosis of SSc were analysed by two radiologists adopting a standard semiquantitative scoring for PF. Pulmonary involvement was evaluated in three sections (superior, middle and inferior). For the assessment of the extension of PF, the adopted semiquantitative HRCT score ranged from 0 to 3 (0~=~absence of PF; 1~=~1-20~\% of lung section involvement; 2~=~21-40~\% of lung section involvement; 3~=~41-100~\% of lung section involvement). Further, a quantitative assessment (i.e. parameters of distribution of lung attenuation such as kurtosis and mean lung attenuation) of PF was performed on the same sections by a rheumatologist, independently and blinded to the radiologists{\textquoteright} scoring, using OsiriX. The results obtained were compared with those of the HRCT semiquantitative analysis. Intra-reader reliability of HRCT findings and feasibility of OsiriX quantitative segmentation were recorded. A significant association between the median values of kurtosis by both the quantitative OsiriX assessment and the HRCT semiquantitative analysis was found (p~<~0.0001). Moreover, kurtosis correlated significantly with the mean lung attenuation (Spearman{\textquoteright}s rho~=~0.885; p~=~0.0001). An excellent intra-reader reliability of HRCT findings was obtained for both readers. A significant difference between the mean time spent on the OsiriX quantitative analysis (mean 1.85~{\textpm}~SD 1.3~min) and the mean time spent by the radiologist for the HRCT semiquantitative assessment (mean 8.5~{\textpm}~SD 4.5~min, p~<~0.00001) was noted. The study provides the new working hypothesis that OsiriX may be a useful and feasible tool to achieve a quantitative evaluation of PF in SSc patients.

}, issn = {1437-160X}, doi = {10.1007/s00296-013-2845-6}, author = {Ariani, Alarico and Carotti, Marina and Gutierrez, Marwin and Bichisecchi, Elisabetta and Grassi, Walter and Giuseppetti, Gian Marco and Salaffi, Fausto} } @article {1280, title = {What do electronic health record vendors reveal about their products: an analysis of vendor websites.}, journal = {J Med Internet Res}, volume = {15}, year = {2013}, month = {2013 Feb 19}, pages = {e36}, abstract = {

BACKGROUND: Purchasing electronic health records (EHRs) typically follows a process in which potential adopters actively seek information, compare alternatives, and form attitudes towards the product. A potential source of information on EHRs that can be used in the process is vendor websites. It is unclear how much product information is presented on EHR vendor websites or the extent of its value during EHR purchasing decisions.

OBJECTIVE: To explore what features of EHR systems are presented by vendors in Ontario, Canada, on their websites, and the persuasive means they use to market such systems; to compare the online information available about primary care EHR systems with that about hospital EHR systems, and with data compiled by OntarioMD, a regional certifying agency.

METHODS: A list of EHR systems available in Ontario was created. The contents of vendor websites were analyzed. A template for data collection and organization was developed and used to collect and organize information on the vendor, website content, and EHR features. First, we mapped information on system features to categories based on a framework from the Institute of Medicine (IOM). Second, we used a grounded theory-like approach to explore information for building consumer confidence in the vendor and product, and the various persuasive strategies employed on vendor websites. All data were first coded by one researcher. A peer reviewer independently analyzed a randomly chosen subset of the websites (10 of 21; 48\%) and provided feedback towards a unified coding scheme. All data were then re-coded and categorized into themes. Finally, we compared information from vendor websites and data gathered by OntarioMD.

RESULTS: Vendors provided little specific product information on their websites. Only two of five acute care EHR websites (40\%) and nine of 16 websites for primary care systems (56\%) featured seven or all eight of the IOM components. Several vendor websites included system interface demonstrations: screenshots (six websites), public videos or slideshows (four websites), or for registered viewers only (three websites). Persuasive means used by vendors included testimonials on 14/21 (67\%) websites, and directional language. Except for one free system, trial EHR versions were not available. OntarioMD provided more comprehensive information about primary care systems than the vendors{\textquoteright} websites. Of 14 points of comparison, only the inclusion of templates and bilingual interfaces were fully represented in both data sources. For all other categories, the vendor websites were less complete than the OntarioMD site.

CONCLUSIONS: EHR vendor websites employ various persuasive means, but lack product-specific information and do not provide options for trying systems on a limited basis. This may impede the ability of potential adopters to form perceptions and compare various offerings. Both vendors and clients could benefit from greater transparency and more specific product information on the Web.

TRIAL REGISTRATION: N/A.

}, keywords = {Commerce, electronic health records, Humans, Internet, Marketing of Health Services, Ontario, Telemedicine}, issn = {1438-8871}, doi = {10.2196/jmir.2312}, author = {Yeung, Natalie K and Jadad, Alejandro R and Shachak, Aviv} } @article {1021, title = {ARDEN2BYTECODE: a one-pass Arden Syntax compiler for service-oriented decision support systems based on the OSGi platform.}, journal = {Comput Methods Programs Biomed}, volume = {106}, year = {2012}, month = {2012 May}, pages = {114-25}, abstract = {

Patient empowerment might be one key to reducing the pressure on health care systems challenged by the expected demographic changes. Knowledge-based systems can, in combination with automated sensor measurements, improve the patients{\textquoteright} ability to review their state of health and make informed decisions. The Arden Syntax, a standardized language for representing medical knowledge, can be used to express the corresponding decision rules. In this paper we introduce ARDEN2BYTECODE, a newly developed open source compiler for the Arden Syntax. ARDEN2BYTECODE runs on Java Virtual Machines (JVM) and translates Arden Syntax directly to Java Bytecode (JBC) executable on JVMs. ARDEN2BYTECODE integrates easily into service-oriented architectures, such as the Open Services Gateway Initiative (OSGi) platform. Apart from an evaluation of compilation performance and execution times, ARDEN2BYTECODE was integrated into an existing knowledge-supported exercise training system, and recorded training sessions were used to check the implementation.

}, keywords = {Decision Support Systems, Management, Programming Languages}, issn = {1872-7565}, doi = {10.1016/j.cmpb.2011.11.003}, author = {Gietzelt, Matthias and Goltz, Ursula and Grunwald, Daniel and Lochau, Malte and Marschollek, Michael and Song, Bianying and Wolf, Klaus-Hendrik} } @article {776, title = {Building a robust, scalable and standards-driven infrastructure for secondary use of EHR data: The SHARPn project.}, journal = {J Biomed Inform}, year = {2012}, month = {2012 Feb 4}, abstract = {The Strategic Health IT Advanced Research Projects (SHARP) Program, established by the Office of the National Coordinator for Health Information Technology in 2010 supports research findings that remove barriers for increased adoption of health IT. The improvements envisioned by the SHARP Area 4 Consortium (SHARPn) will enable the use of the electronic health record (EHR) for secondary purposes, such as care process and outcomes improvement, biomedical research and epidemiologic monitoring of the nation{\textquoteright}s health. One of the primary informatics problem areas in this endeavor is the standardization of disparate health data from the nation{\textquoteright}s many health care organizations and providers. The SHARPn team is developing open source services and components to support the ubiquitous exchange, sharing and reuse or {\textquoteright}liquidity{\textquoteright} of operational clinical data stored in electronic health records. One year into the design and development of the SHARPn framework, we demonstrated end to end data flow and a prototype SHARPn platform, using thousands of patient electronic records sourced from two large healthcare organizations: Mayo Clinic and Intermountain Healthcare. The platform was deployed to (1) receive source EHR data in several formats, (2) generate structured data from EHR narrative text, and (3) normalize the EHR data using common detailed clinical models and Consolidated Health Informatics standard terminologies, which were (4) accessed by a phenotyping service using normalized data specifications. The architecture of this prototype SHARPn platform is presented. The EHR data throughput demonstration showed success in normalizing native EHR data, both structured and narrative, from two independent organizations and EHR systems. Based on the demonstration, observed challenges for standardization of EHR data for interoperable secondary use are discussed.}, issn = {1532-0480}, doi = {10.1016/j.jbi.2012.01.009}, author = {Rea, Susan and Pathak, Jyotishman and Savova, Guergana and Oniki, Thomas A and Westberg, Les and Beebe, Calvin E and Tao, Cui and Parker, Craig G and Haug, Peter J and Huff, Stanley M and Chute, Christopher G} } @article {816, title = {Developing open source, self-contained disease surveillance software applications for use in resource-limited settings.}, journal = {BMC Med Inform Decis Mak}, volume = {12}, year = {2012}, month = {2012}, pages = {99}, abstract = {UNLABELLED: ABSTRACT: BACKGROUND: Emerging public health threats often originate in resource-limited countries. In recognition of this fact, the World Health Organization issued revised International Health Regulations in 2005, which call for significantly increased reporting and response capabilities for all signatory nations. Electronic biosurveillance systems can improve the timeliness of public health data collection, aid in the early detection of and response to disease outbreaks, and enhance situational awareness. 
METHODS: As components of its Suite for Automated Global bioSurveillance (SAGES) program, The Johns Hopkins University Applied Physics Laboratory developed two open-source, electronic biosurveillance systems for use in resource-limited settings. OpenESSENCE provides web-based data entry, analysis, and reporting. ESSENCE Desktop Edition provides similar capabilities for settings without internet access. Both systems may be configured to collect data using locally available cell phone technologies. RESULTS: ESSENCE Desktop Edition has been deployed for two years in the Republic of the Philippines. Local health clinics have rapidly adopted the new technology to provide daily reporting, thus eliminating the two-to-three week data lag of the previous paper-based system. CONCLUSIONS: OpenESSENCE and ESSENCE Desktop Edition are two open-source software products with the capability of significantly improving disease surveillance in a wide range of resource-limited settings. These products, and other emerging surveillance technologies, can assist resource-limited countries compliance with the revised International Health Regulations.}, issn = {1472-6947}, doi = {10.1186/1472-6947-12-99}, author = {Campbell, Timothy C and Hodanics, Charles J and Babin, Steven M and Poku, Adjoa M and Wojcik, Richard A and Skora, Joseph F and Coberly, Jacqueline S and Mistry, Zarna S and Lewis, Sheri H} } @article {815, title = {Free and open-source software application for the evaluation of coronary computed tomography angiography images.}, journal = {Arq Bras Cardiol}, year = {2012}, month = {2012 Oct 2}, abstract = {BACKGROUND: The standardization of images used in Medicine in 1993 was performed using the DICOM (Digital Imaging and Communications in Medicine) standard. Several tests use this standard and it is increasingly necessary to design software applications capable of handling this type of image; however, these software applications are not usually free and open-source, and this fact hinders their adjustment to most diverse interests. OBJECTIVE: To develop and validate a free and open-source software application capable of handling DICOM coronary computed tomography angiography images. METHODS: We developed and tested the ImageLab software in the evaluation of 100 tests randomly selected from a database. We carried out 600 tests divided between two observers using ImageLab and another software sold with Philips Brilliance computed tomography appliances in the evaluation of coronary lesions and plaques around the left main coronary artery (LMCA) and the anterior descending artery (ADA). To evaluate intraobserver, interobserver and intersoftware agreements, we used simple and kappa statistics agreements. RESULTS: The agreements observed between software applications were generally classified as substantial or almost perfect in most comparisons. CONCLUSION: The ImageLab software agreed with the Philips software in the evaluation of coronary computed tomography angiography tests, especially in patients without lesions, with lesions < 50\% in the LMCA and < 70\% in the ADA. 
The agreement for lesions > 70\% in the ADA was lower, but this is also observed when the anatomical reference standard is used.}, issn = {1678-4170}, author = {Hadlich, Marcelo Souza and Oliveira, Gl{\'a}ucia Maria Moraes and Feij{\'o}o, Ra{\'u}l A and Azevedo, Clerio F and Tura, Bernardo Rangel and Ziemer, Paulo Gustavo Portela and Blanco, Pablo Javier and Pina, Gustavo and Meira, M{\'a}rcio and Souza E Silva, Nelson Albuquerque de} } @article {796, title = {Improving quality and use of data through data-use workshops: Zanzibar, United Republic of Tanzania.}, journal = {Bull World Health Organ}, volume = {90}, year = {2012}, month = {2012 May 1}, pages = {379-84}, abstract = {PROBLEM: In Zanzibar, United Republic of Tanzania, as in many developing countries, health managers lack faith in the national Health Management Information System (HMIS). The establishment of parallel data collection systems generates a vicious cycle: national health data are used little because they are of poor quality, and their relative lack of use, in turn, makes their quality remain poor. APPROACH: An action research approach was applied to strengthen the use of information and improve data quality in Zanzibar. The underlying premise was that encouraging use in small incremental steps could help to break the vicious cycle and improve the HMIS. LOCAL SETTING: To test the hypothesis at the national and district levels a project to strengthen the HMIS was established in Zanzibar. The project included quarterly data-use workshops during which district staff assessed their own routine data and critiqued their colleagues{\textquoteright} data. RELEVANT CHANGES: The data-use workshops generated inputs that were used by District Health Information Software developers to improve the tool. The HMIS, which initially covered only primary care outpatients and antenatal care, eventually grew to encompass all major health programmes and district and referral hospitals. The workshops directly contributed to improvements in data coverage, data set quality and rationalization, and local use of target indicators. LESSONS LEARNT: Data-use workshops with active engagement of data users themselves can improve health information systems overall and enhance staff capacity for information use, presentation and analysis for decision-making.}, issn = {1564-0604}, doi = {10.2471/BLT.11.099580}, author = {Braa, J{\o}rn and Heywood, Arthur and Sahay, Sundeep} } @article {801, title = {Integration of the Image-Guided Surgery Toolkit (IGSTK) into the Medical Imaging Interaction Toolkit (MITK).}, journal = {J Digit Imaging}, year = {2012}, month = {2012 Apr 26}, abstract = {The development cycle of an image-guided surgery navigation system is too long to meet current clinical needs. This paper presents an integrated system developed by the integration of two open-source software (IGSTK and MITK) to shorten the development cycle of the image-guided surgery navigation system and save human resources simultaneously. An image-guided surgery navigation system was established by connecting the two aforementioned open-source software libraries. It used the Medical Imaging Interaction Toolkit (MITK) as a framework providing image processing tools for the image-guided surgery navigation system of medical imaging software with a high degree of interaction and used the Image-Guided Surgery Toolkit (IGSTK) as a library that provided the basic components of the system for location, tracking, and registration. 
The electromagnetic tracking device was used to measure the real-time position of surgical tools and fiducials attached to the patient{\textquoteright}s anatomy. IGSTK was integrated into MITK; at the same time, the compatibility and the stability of this system were emphasized. Experiments showed that an integrated system of the image-guided surgery navigation system could be developed in 2~months. The integration of IGSTK into MITK is feasible. Several techniques for 3D reconstruction, geometric analysis, mesh generation, and surface data analysis for medical image analysis of MITK can connect with the techniques for location, tracking, and registration of IGSTK. This integration of advanced modalities can decrease software development time and emphasize the precision, safety, and robustness of the image-guided surgery navigation system.}, issn = {1618-727X}, doi = {10.1007/s10278-012-9477-3}, author = {Lu, Tong and Liang, Ping and Wu, Wen-Bo and Xue, Jin and Lei, Cheng-Long and Li, Yin-Yan and Sun, Yun-Na and Liu, Fang-Yi} } @conference {838, title = {Interactive Electromechanical Model of the Heart for Patient-Specific Therapy Planning and Training using SOFA}, booktitle = {VPH 2012}, year = {2012}, address = {Londres, United Kingdom}, abstract = {The contributions of this work are twofold. First, we developed an electrophysiological training simulator in SOFA which tackles the interactive issue in the context of cardiac arrhythmias. Coupled with this electrophysiology, we developed a mechanical model of the heart that can be personalized from MRI datasets. Our simulations are based on the SOFA platform. SOFA is an open-source framework targeted at real-time simulation with an emphasis on medical simulation, mainly developed at Inria. A large choice of efficient solvers, hyperelastic or viscous material laws are already implemented in SOFA. Moreover, it enables interactivity during the simulation (pacing, surgery planning, ...) and gives a good trade-off between accuracy and computational efficiency.}, url = {http://hal.inria.fr/hal-00751537}, author = {Talbot, Hugo and Marchesseau, St{\'e}phanie and Duriez, Christian and Courtecuisse, Hadrien and Relan, Jatin and Sermesant, Maxime and Cotin, St{\'e}phane and Delingette, Herv{\'e}} } @article {820, title = {MITK Diffusion Imaging.}, journal = {Methods Inf Med}, volume = {51}, year = {2012}, month = {2012 Oct 11}, pages = {441-8}, abstract = {Background: Diffusion-MRI provides a unique window on brain anatomy and insights into aspects of tissue structure in living humans that could not be studied previously. There is a major effort in this rapidly evolving field of research to develop the algorithmic tools necessary to cope with the complexity of the datasets. Objectives: This work illustrates our strategy that encompasses the development of a modularized and open software tool for data processing, visualization and interactive exploration in diffusion imaging research and aims at reinforcing sustainable evaluation and progress in the field. Methods: In this paper, the usability and capabilities of a new application and toolkit component of the Medical Imaging and Interaction Toolkit (MITK, www.mitk.org), MITK-DI, are demonstrated using in-vivo datasets. Results: MITK-DI provides a comprehensive software framework for high-performance data processing, analysis and interactive data exploration, which is designed in a modular, extensible fashion (using CTK) and in adherence to widely accepted coding standards (e.g. ITK, VTK). 
MITK-DI is available both as an open source software development toolkit and as a ready-to-use installable application. Conclusions: The open source release of the modular MITK-DI tools will increase verifiability and comparability within the research community and will also be an important step towards bringing many of the current techniques towards clinical application.}, issn = {0026-1270}, doi = {10.3414/ME11-02-0031}, author = {Fritzsche, K H and Neher, P F and Reicht, I and van Bruggen, T and Goch, C and Reisert, M and Nolden, M and Zelzer, S and Meinzer, H-P and Stieltjes, B} } @conference {785, title = {MITK global tractography}, booktitle = {Medical Imaging 2012: Image Processing}, year = {2012}, publisher = {SPIE}, organization = {SPIE}, abstract = {Fiber tracking algorithms yield valuable information for neurosurgery as well as automated diagnostic approaches. However, they have not yet arrived in the daily clinical practice. In this paper we present an open source integration of the global tractography algorithm proposed by Reisert et.al.1 into the open source Medical Imaging Interaction Toolkit (MITK) developed and maintained by the Division of Medical and Biological Informatics at the German Cancer Research Center (DKFZ). The integration of this algorithm into a standardized and open development environment like MITK enriches accessibility of tractography algorithms for the science community and is an important step towards bringing neuronal tractography closer to a clinical application. The MITK diffusion imaging application, downloadable from www.mitk.org, combines all the steps necessary for a successful tractography: preprocessing, reconstruction of the images, the actual tracking, live monitoring of intermediate results, postprocessing and visualization of the final tracking results. This paper presents typical tracking results and demonstrates the steps for pre- and post-processing of the images.}, doi = {10.1117/12.911215}, url = {http://link.aip.org/link/?PSI/8314/83144D/1}, author = {Peter F. Neher and Bram Stieltjes and Marco Reisert and Ignaz Reicht and Meinzer, Hans-Peter and Klaus H. Fritzsche}, editor = {David R. Haynor and Sebastien Ourselin} } @article {908, title = {The National Alliance for Medical Image Computing, a roadmap initiative to build a free and open source software infrastructure for translational research in medical image analysis}, journal = {Journal of the American Medical Informatics Association}, volume = {19}, year = {2012}, pages = {176{\textendash}180}, abstract = {The National Alliance for Medical Image Computing (NA-MIC), is a multi-institutional, interdisciplinary community of researchers, who share the recognition that modern health care demands improved technologies to ease suffering and prolong productive life. Organized under the National Centers for Biomedical Computing 7 years ago, the mission of NA-MIC is to implement a robust and flexible open-source infrastructure for developing and applying advanced imaging technologies across a range of important biomedical research disciplines. A measure of its success, NA-MIC is now applying this technology to diseases that have immense impact on the duration and quality of life: cancer, heart disease, trauma, and degenerative genetic diseases. 
The targets of this technology range from group comparisons to subject-specific analysis.}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-84857161947\&partnerID=40\&md5=c7d7cbab9304114a219812cb7c5091ee}, author = {Kapur, T.a and Pieper, S.b and Whitaker, R.c and Aylward, S.d and Jakab, M.a and Schroeder, W.d and Kikinis, R.a} } @article {898, title = {Opportunities and Challenges of Open Source Software Integration in Developing Countries: Case of Zanzibar Health Sector}, journal = {Journal of Health Informatics in Developing Countries}, volume = {6}, year = {2012}, abstract = {This paper describes the opportunities and related challenges of integrating Open Source Software process in organization. It focuses on technical and organizational part of the integration practice in Open Source Software philosophy. Thus, several issues about opportunities and challenges of integration have been identified, including the homogeneity of the systems in place and cooperation nature of integration. In essence, integration has proved to be the solution to several technical and administrative capacity problems that face health sector in Zanzibar.}, keywords = {heterogeneous., homogeneous, ntegration process, open source software}, author = {BAKAR, Abubakar and SHEIKH, Yahya and SULTAN, Bakar} } @inbook {794, title = {Overview of the Most Important Open Source Software: Analysis of the Benefits of OpenMRS, OpenEMR, and VistA}, booktitle = {Telemedicine and E-Health Services, Policies, and Applications: Advancements and Developments}, year = {2012}, pages = {315{\textendash}346}, publisher = {IGI Global}, organization = {IGI Global}, address = {Hershey, PA, USA}, abstract = {In this chapter, the authors review software that enables the proper management of EHR. The different types of software share the feature of being open source and offer the best opportunity in health care to developing countries{\textemdash}an overall integrated approach. The authors analyze the main free software programs (technical features, programming languages, places for introduction, etc.). Then they focus on the description and the comparison of the three most important open source software programs EHR (OpenMRS, OpenVistA, and OpenEMR) that are installed on two operating systems (Linux Ubuntu and Windows). Finally, the authors show the results of the various parameters measured in these systems after using different Web browsers. The results show us how the three main EHR applications work depending on which operating system is installed and which web browser is used.}, isbn = {9781466608887}, doi = {10.4018/978-1-4666-0888-7.ch012}, url = {http://services.igi-global.com/resolvedoi/resolve.aspx?doi=10.4018/978-1-4666-0888-7.ch012}, author = {Sainz de Abajo, Beatriz and Agust{\'\i}n Llamas Ballestero} } @article {828, title = {A quick guide to software licensing for the scientist-programmer.}, journal = {PLoS Comput Biol}, volume = {8}, year = {2012}, month = {2012}, pages = {e1002598}, issn = {1553-7358}, doi = {10.1371/journal.pcbi.1002598}, author = {Morin, Andrew and Urban, Jennifer and Sliz, Piotr} } @article {1050, title = {S2DIA: a diagnostic system for Diabetes mellitus using SANA platform.}, journal = {Conf Proc IEEE Eng Med Biol Soc}, volume = {2012}, year = {2012}, month = {2012}, pages = {6078-81}, abstract = {

Currently, diabetes is a very common disease around the world, and with an increase in sedentary lifestyles, obesity, and an aging population, the number of people with diabetes worldwide will increase by more than 50\%. In this context, MIT (Massachusetts Institute of Technology) developed the SANA platform, which brings the benefits of information technology to the field of healthcare. It supports healthcare delivery in remote areas and improves patient access to medical specialists for faster, higher quality, and more cost-effective diagnosis and intervention. For these reasons, we developed a system for the diagnosis of diabetes using the SANA platform, called S2DIA. It is the first step towards knowing the risks for type 2 diabetes, and it will be evaluated especially in remote and poor areas of Brazil.

}, keywords = {Diabetes Mellitus, Type 2, Diagnosis, Computer-Assisted, Humans, Risk Factors}, issn = {1557-170X}, doi = {10.1109/EMBC.2012.6347380}, author = {Costa, Clayton M and Gondim, Dikson D and Gondim, Dibson D and Soares, Heliana B and Ribeiro, Anna G C D and Silva, Ikaro and Winkler, Erick and Celi, Leo and Guerreiro, Ana M G and Leite, Cic{\'\i}lia R M} } @conference {829, title = {A Standards-Based Open Source Application to Gather Health Assessment Data in Developing Countries}, booktitle = {Global Humanitarian Technology Conference (GHTC), 2012 IEEE}, year = {2012}, month = {oct.}, abstract = {Many organizations are working in developing coun- tries to support local health care organizations and infrastructure to provide sustainable, community-based health care. This requires not only the influx of medical staff and supplies, but also requires maintaining individual health care records and enabling the ability to collect, analyze and aggregate data in the field to customize care to the local needs of the community, and to provide continuity of care to its citizens. The recent rise of adoption of standards for electronic health records(EHR) provides an alternative to using paper forms in mobile health clinics that often serve these countries. In this paper, we describe an open-source, standards-based health assessment software application developed by the non-profit organization Health Records For Everyone (HR4E) and field tested in a mobile health clinic in rural Ethiopia in the fall of 2011. The application allows mobile health clinic staff to quickly deploy medical clinics and collect patient data electronically in the face of various environmental challenges. In addition to producing electronic patient records which are validated in-field using HL7{\textquoteright}s Clinical Document Architecture standard, the application allows medical practitioners to view and summarize patient data for in-field analysis.}, keywords = {and Telemedicine, Ash, Communities, Computers, Health, Medical services, Medical Technology, Mobile communication, Software, Standards}, doi = {10.1109/GHTC.2012.78}, author = {Gainer, Alex and Roth, Mary and Strong, Phil and Davis, James} } @article {1036, title = {A static-image telepathology system for dermatopathology consultation in East Africa: the Massachusetts General Hospital Experience.}, journal = {J Am Acad Dermatol}, volume = {67}, year = {2012}, month = {2012 Nov}, pages = {997-1007}, abstract = {

BACKGROUND: The histologic diagnosis of skin lesions in the developing world is complicated by the shortage of pathologists with subspecialty training in dermatopathology, limited access to ancillary diagnostic testing, and costly referrals for expert glass slide consultation in challenging cases.

OBJECTIVE: In this study we evaluate the feasibility of a static-image telepathology platform in Africa for performing accurate dermatopathology consultations.

METHODS: A static-image telepathology platform using the iPath server was utilized by referring pathologists in 4 African hospitals. Diagnostic interpretations were provided by Massachusetts General Hospital dermatopathologists at no cost. The diagnostic accuracy and interobserver correlation were evaluated.

RESULTS: The static histopathologic images were diagnostic in 22 of 29 (76\%) cases. Diagnostic accuracy between static image and glass slide diagnosis in 22 cases was 91\%, ranging from 86\% to 95\% according to years of dermatopathology subspecialty expertise. Comparison with the glass slides showed that the telepathology diagnosis was limited by inappropriate field selection in only one case. Interobserver concordance between two pathologists was high (K = 0.86) suggesting that this platform is easy to use with minimal training of both referring and consulting pathologists.

LIMITATIONS: Concordance between conventional microscopy and static-image telepathology was assessed in 22 of 29 cases for which glass slides were received. Interobserver concordance was assessed for two pathologists.

CONCLUSION: Static-image telepathology is a feasible means of rendering diagnoses on dermatopathology cases and is a cost-effective technology for obtaining much-needed second opinions in resource-poor settings.

}, keywords = {Africa, Eastern, Cost-Benefit Analysis, Feasibility Studies, Hospitals, General, Humans, Massachusetts, Microscopy, Observer Variation, Pathology, Clinical, Remote Consultation, Skin Diseases, Skin Neoplasms, telepathology}, issn = {1097-6787}, doi = {10.1016/j.jaad.2011.12.036}, author = {Gimbel, Devon C and Sohani, Aliyah R and Prasad Busarla, Satya Vara and Kirimi, Jesca Muthoni and Sayed, Shahin and Okiro, Patricia and Nazarian, Rosalynn M} } @article {844, title = {Towards an international electronic repository and virtual laboratory of open data and open-source software for telehealth research: comparison of international, Australian and finnish privacy policies.}, journal = {Stud Health Technol Inform}, volume = {182}, year = {2012}, month = {2012}, pages = {153-60}, abstract = {

Health data includes all content related to health in all data formats, document types, information systems, publication media and languages from all specialties, organisations, regions, states and countries. Capabilities to share, integrate and compare these data contents, clinical trial results and other evaluation outcomes, together with telehealth applications for data processing, are critical to accelerate discovery and its diffusion to clinical practice. However, the same ethical and legal frameworks that protect privacy hinder this open data and open-source code approach, and the issues accumulate when data are moved across national, regional or organisational borders. This can be seen as one of the reasons why many telehealth applications and health-research findings tend to be limited to very narrow domains and global results are lacking. The aim of this paper is to take steps towards establishing an international electronic repository and virtual laboratory of open data and open-source code for research purposes by comparing international, Australian and Finnish frameworks. The frameworks seem to be fundamentally similar; they apply the principles of accountability and adequacy to using and disclosing personal data. Their requirements to inform data subjects about the purposes of data collection and use before the dataset is collected, to assure that individuals are no longer identifiable, and to destroy data when the research activities are finished make sharing data and even secondary data difficult. Using the Internet or cloud services for sharing without proper approvals by ethics committees is technically not allowed if the data are stored in another country. The research community needs to overcome these barriers and develop a virtual laboratory that operates on distributed data repositories. This empowers the community by enabling systematic evaluations of new technologies and research hypotheses on a rich variety of data and against existing applications, and subsequent tracking of quality improvements over time.

}, issn = {0926-9630}, author = {Suominen, Hanna} } @article {888, title = {The use and role of open source software applications in public and not-for-profit hospitals in the United States.}, journal = {Health Care Manage Rev}, year = {2012}, month = {2012 Oct 31}, abstract = {

BACKGROUND: The potential cost savings and customizability of open source software (OSS) may be particularly attractive for hospitals. However, although numerous health-care-specific OSS applications exist, the adoption of OSS health information technology (HIT) applications is not widespread in the United States. PURPOSE: This disconnect between the availability of promising software and low adoption raises the basic question: If OSS HIT is so advantageous, why are more health care organizations not using it? METHODOLOGY: We interviewed the chief information officer, or a person in an equivalent position, at 17 not-for-profit and public hospitals across the United States. Through targeted recruitment, our sample included nine hospitals using OSS HIT and eight hospitals not using OSS HIT. The open-ended interview questions were guided by domains included in the fit-viability theory, an organizational-level innovation adoption framework, and those suggested by a review of the literature. Transcripts were analyzed using an inductive and comparative approach, which involved open coding for relevant themes. FINDINGS: Interviews described the state of OSS use in hospitals. Specifically, general OSS applications were widely used by IT professionals. In addition, hospitals using OSS HIT still relied heavily on vendor support. In terms of why decisions to use OSS HIT arose, several hospitals using OSS HIT noted the cost advantages. In contrast, hospitals avoiding OSS HIT were clear: OSS as a class did not fit with clinical work and posed too much risk. PRACTICE IMPLICATIONS: Perceptions of OSS HIT ranged from enthusiastic embrace, to resigned adoption, to refusal, to abandonment. Some organizations were achieving success with their OSS HIT choices, but they still relied on vendors for significant support. The decision to adopt OSS HIT was not uniform but contingent upon views of the risk posed by the technology, economic factors, and the hospital{\textquoteright}s existing capabilities.

}, issn = {1550-5030}, doi = {10.1097/HMR.0b013e318276f9ed}, author = {Vest, Joshua R and Stephens, James H} } @inbook {786, title = {VURTIGO: Visualization Platform for Real-Time, MRI-Guided Cardiac Electroanatomic Mapping}, booktitle = {Statistical Atlases and Computational Models of the Heart. Imaging and Modelling Challenges}, series = {Lecture Notes in Computer Science}, volume = {7085}, year = {2012}, pages = {244-253}, publisher = {Springer Berlin / Heidelberg}, organization = {Springer Berlin / Heidelberg}, abstract = {Guidance of electrophysiological (EP) procedures by magnetic resonance imaging (MRI) has significant advantages over x-ray fluoroscopy. Display of electroanatomic mapping (EAM) during an intervention fused with a prior MR volume and DE-MRI derived tissue classification should improve the accuracy of cardiac resynchronization therapy (CRT) for ventricular arrhythmias. Improved accuracy in the spatial localization of recorded EP points will produce an EAM to constrain and customize patient-specific cardiac electroanatomic models being developed for understanding the patterns of arrhythmogenic slow conduction zones causing reentry circuits and treatment planning. The Vurtigo software presented here is a four dimensional (3D+time) real-time visualization application for guiding interventions capable of displaying prior volumes, real-time MRI scan planes, EAM (voltage or activation times), segmented models, and tracked catheters. This paper will describe the architecture and features of Vurtigo followed by the application example of guiding percutaneous cardiac electroanatomic mapping in porcine models.}, isbn = {978-3-642-28325-3}, url = {http://dx.doi.org/10.1007/978-3-642-28326-0_25}, author = {Radau, Perry and Pintilie, Stefan and Flor, Roey and Biswas, Labonny and Oduneye, Samuel and Ramanan, Venkat and Anderson, Kevan and Wright, Graham}, editor = {Camara, Oscar and Konukoglu, Ender and Pop, Mihaela and Rhode, Kawal and Sermesant, Maxime and Young, Alistair} } @article {810, title = {Arden2ByteCode: A one-pass Arden Syntax compiler for service-oriented decision support systems based on the OSGi platform.}, journal = {Comput Methods Programs Biomed}, year = {2011}, doi = {10.1016/j.cmpb.2011.11.003}, url = {http://dx.doi.org/10.1016/j.cmpb.2011.11.003} note = {Epub ahead of print}, author = {Gietzelt, Matthias and Goltz, Ursula and Grunwald, Daniel and Lochau, Malte and Marschollek, Michael and Song, Bianying and Wolf, Klaus-Hendrik} } @article {868, title = {The caBIG{\textregistered} Life Science Business Architecture Model.}, journal = {Bioinformatics}, volume = {27}, year = {2011}, month = {2011 May 15}, pages = {1429-35}, abstract = {

MOTIVATION: Business Architecture Models (BAMs) describe what a business does, who performs the activities, where and when activities are performed, how activities are accomplished and which data are present. The purpose of a BAM is to provide a common resource for understanding business functions and requirements and to guide software development. The cancer Biomedical Informatics Grid (caBIG{\textregistered}) Life Science BAM (LS BAM) provides a shared understanding of the vocabulary, goals and processes that are common in the business of LS research.

RESULTS: LS BAM 1.1 includes 90 goals and 61 people and groups within Use Case and Activity Unified Modeling Language (UML) Diagrams. Here we report on the model{\textquoteright}s current release, LS BAM 1.1, its utility and usage, and plans for future use and continuing development for future releases. Availability and Implementation: The LS BAM is freely available as UML, PDF and HTML (https://wiki.nci.nih.gov/x/OFNyAQ).

}, keywords = {Biomedical Research, Computational Biology, Computer Systems, National Cancer Institute (U.S.), Neoplasms, Software, United States, Vocabulary, Controlled}, issn = {1367-4811}, doi = {10.1093/bioinformatics/btr141}, author = {Boyd, Lauren Becnel and Hunicke-Smith, Scott P and Stafford, Grace A and Freund, Elaine T and Ehlman, Michele and Chandran, Uma and Dennis, Robert and Fernandez, Anna T and Goldstein, Stephen and Steffen, David and Tycko, Benjamin and Klemm, Juli D} } @article {1047, title = {[Central online quality assurance in radiology: an IT solution exemplified by the German Breast Cancer Screening Program].}, journal = {Rofo}, volume = {183}, year = {2011}, month = {2011 Sep}, pages = {849-54}, abstract = {

PURPOSE: Physical-technical quality assurance is one of the essential tasks of the National Reference Centers in the German Breast Cancer Screening Program. For this purpose the mammography units are required to transfer the measured values of the constancy tests on a daily basis and all phantom images created for this purpose on a weekly basis to the reference centers. This is a serious logistical challenge. To meet these requirements, we developed an innovative software tool.

MATERIALS AND METHODS: By the end of 2005, we had already developed web-based software (MammoControl) allowing the transmission of constancy test results via entry forms. For automatic analysis and transmission of the phantom images, we then introduced an extension (MammoControl DIANA). This was based on Java, Java Web Start, the NetBeans Rich Client Platform, the Pixelmed Java DICOM Toolkit and the ImageJ library.

RESULTS: MammoControl DIANA was designed to run locally in the mammography units. This allows automated on-site image analysis. Both results and compressed images can then be transmitted to the reference center. We developed analysis modules for the daily and monthly constancy tests and additionally for a homogeneity test.

CONCLUSION: The software we developed facilitates the immediate availability of measurement results, phantom images, and DICOM header data in all reference centers. This allows both targeted guidance and short response time in the case of errors. We achieved a consistent IT-based evaluation with standardized tools for the entire screening program in Germany.
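As an editorial illustration of the kind of check an automated phantom-image module like the one described above might run, the sketch below computes ROI means over a toy image and flags deviations from their global mean. It is not the MammoControl DIANA code; the pixel values, ROI layout, and 5% tolerance are invented, not DIN/EUREF limits.

```python
# Illustrative sketch only (not MammoControl DIANA): a simple homogeneity
# check over a phantom image given as a plain 2D list of pixel values.
# The 5 % tolerance and corner-plus-centre ROI layout are assumptions.

def roi_mean(image, top, left, size):
    rows = [row[left:left + size] for row in image[top:top + size]]
    values = [v for row in rows for v in row]
    return sum(values) / len(values)

def homogeneity_check(image, roi_size=2, tolerance=0.05):
    h, w = len(image), len(image[0])
    rois = {
        "top_left": (0, 0),
        "top_right": (0, w - roi_size),
        "bottom_left": (h - roi_size, 0),
        "bottom_right": (h - roi_size, w - roi_size),
        "centre": ((h - roi_size) // 2, (w - roi_size) // 2),
    }
    means = {name: roi_mean(image, r, c, roi_size) for name, (r, c) in rois.items()}
    global_mean = sum(means.values()) / len(means)
    max_dev = max(abs(m - global_mean) / global_mean for m in means.values())
    return means, max_dev, max_dev <= tolerance

if __name__ == "__main__":
    phantom = [[100, 101,  99, 100],
               [102, 100, 100,  98],
               [ 99, 100, 101, 100],
               [100,  99, 100, 102]]
    means, max_dev, passed = homogeneity_check(phantom)
    print(means, round(max_dev, 4), "PASS" if passed else "REVIEW")
```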

}, keywords = {Data Compression, Female, Germany, Guideline Adherence, Humans, Image Processing, Computer-Assisted, Mammography, Mass Screening, Online Systems, Phantoms, Imaging, Quality Assurance, Health Care, Radiology Information Systems, Reference Standards, Software}, issn = {1438-9010}, doi = {10.1055/s-0031-1281599}, author = {Czwoydzinski, J and Girnus, R and Sommer, A and Heindel, W and Lenzen, H} } @article {869, title = {DDN: a caBIG{\textregistered} analytical tool for differential network analysis.}, journal = {Bioinformatics}, volume = {27}, year = {2011}, month = {2011 Apr 1}, pages = {1036-8}, abstract = {

UNLABELLED: Differential dependency network (DDN) is a caBIG{\textregistered} (cancer Biomedical Informatics Grid) analytical tool for detecting and visualizing statistically significant topological changes in transcriptional networks representing two biological conditions. Developed under caBIG{\textregistered}{\textquoteright}s In Silico Research Centers of Excellence (ISRCE) Program, DDN enables differential network analysis and provides an alternative way of defining network biomarkers predictive of phenotypes. DDN also serves as a useful systems biology tool for users across biomedical research communities to infer how genetic, epigenetic or environmental variables may affect biological networks and clinical phenotypes. Besides the standalone Java application, we have also developed a Cytoscape plug-in, CytoDDN, to integrate network analysis and visualization seamlessly.

AVAILABILITY: The Java and MATLAB source code can be downloaded at the authors{\textquoteright} web site http://www.cbil.ece.vt.edu/software.htm.
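To illustrate only the underlying idea of a differential network (edges present under one condition but not the other), the toy sketch below contrasts two condition-specific edge sets. It does not implement DDN's statistical testing, and the gene pairs are invented examples.

```python
# Toy illustration of the differential-network idea, NOT the DDN method:
# report undirected edges unique to each condition and those shared.

def differential_edges(net_a, net_b):
    norm = lambda net: {tuple(sorted(e)) for e in net}  # treat edges as undirected
    a, b = norm(net_a), norm(net_b)
    return {"only_in_A": a - b, "only_in_B": b - a, "shared": a & b}

if __name__ == "__main__":
    condition_a = [("TP53", "MDM2"), ("BRCA1", "RAD51"), ("MYC", "MAX")]
    condition_b = [("TP53", "MDM2"), ("MYC", "MAX"), ("MYC", "E2F1")]
    for name, edges in differential_edges(condition_a, condition_b).items():
        print(name, sorted(edges))
```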

}, keywords = {Animals, Computational Biology, Epigenesis, Genetic, Female, Gene Regulatory Networks, Mammary Glands, Animal, Rats, Software, Systems Biology}, issn = {1367-4811}, doi = {10.1093/bioinformatics/btr052}, author = {Zhang, Bai and Tian, Ye and Jin, Lu and Li, Huai and Shih, Ie-Ming and Madhavan, Subha and Clarke, Robert and Hoffman, Eric P and Xuan, Jianhua and Hilakivi-Clarke, Leena and Wang, Yue} } @article {Ferranti23092011, title = {The design and implementation of an open-source, data-driven cohort recruitment system: the Duke Integrated Subject Cohort and Enrollment Research Network (DISCERN)}, journal = {Journal of the American Medical Informatics Association}, year = {2011}, abstract = {Objective: Failure to reach research subject recruitment goals is a significant impediment to the success of many clinical trials. Implementation of health-information technology has allowed retrospective analysis of data for cohort identification and recruitment, but few institutions have also leveraged real-time streams to support such activities. Design: Duke Medicine has deployed a hybrid solution, The Duke Integrated Subject Cohort and Enrollment Research Network (DISCERN), that combines both retrospective warehouse data and clinical events contained in prospective Health Level 7 (HL7) messages to immediately alert study personnel of potential recruits as they become eligible. Results: DISCERN analyzes more than 500 000 messages daily in service of 12 projects. Users may receive results via email, text pages, or on-demand reports. Preliminary results suggest DISCERN{\textquoteright}s unique ability to reason over both retrospective and real-time data increases study enrollment rates while reducing the time required to complete recruitment-related tasks. The authors have introduced a preconfigured DISCERN function as a self-service feature for users. Limitations: The DISCERN framework is adoptable primarily by organizations using both HL7 message streams and a data warehouse. More efficient recruitment may exacerbate competition for research subjects, and investigators uncomfortable with new technology may find themselves at a competitive disadvantage in recruitment. Conclusion: DISCERN{\textquoteright}s hybrid framework for identifying real-time clinical events housed in HL7 messages complements the traditional approach of using retrospective warehoused data. DISCERN is helpful in instances when the required clinical data may not be loaded into the warehouse and thus must be captured contemporaneously during patient care. Use of an open-source tool supports generalizability to other institutions at minimal cost.}, doi = {10.1136/amiajnl-2011-000115}, url = {http://jamia.bmj.com/content/early/2011/09/23/amiajnl-2011-000115.abstract}, author = {Ferranti, Jeffrey M and Gilbert, William and McCall, Jonathan and Shang, Howard and Barros, Tanya and Horvath, Monica M} } @article {660, title = {Dicoogle - an Open Source Peer-to-Peer PACS.}, journal = {Journal of digital imaging : the official journal of the Society for Computer Applications in Radiology}, volume = {24}, year = {2011}, month = {2011 Oct}, pages = {848-56}, abstract = {Picture Archiving and Communication Systems (PACS) have been widely deployed in healthcare institutions, and they now constitute a normal commodity for practitioners. However, their installation, maintenance, and utilization are still a burden due to their heavy structures, typically supported by centralized computational solutions.
In this paper, we present Dicoogle, a PACS archive supported by a document-based indexing system and by peer-to-peer (P2P) protocols. Replacing the traditional database storage (RDBMS) by a documental organization permits gathering and indexing data from file-based repositories, which allows searching the archive through free text queries. As a direct result of this strategy, more information can be extracted from medical imaging repositories, which clearly increases flexibility when compared with current query and retrieval DICOM services. The inclusion of P2P features allows PACS internetworking without the need for a central management framework. Moreover, Dicoogle is easy to install, manage, and use, and it maintains full interoperability with standard DICOM services.}, issn = {1618-727X}, doi = {10.1007/s10278-010-9347-9}, author = {Costa, Carlos and Ferreira, Carlos and Basti{\~a}o, Lu{\'\i}s and Ribeiro, Lu{\'\i}s and Silva, Augusto and Oliveira, Jos{\'e} Lu{\'\i}s} } @article {1746-1596-6-S1-S16, title = {Distributed computing in image analysis using open source frameworks and application to image sharpness assessment of histological whole slide images}, journal = {Diagnostic Pathology}, volume = {6}, number = {Suppl 1}, year = {2011}, pages = {S16}, abstract = {BACKGROUND:Automated image analysis on virtual slides is evolving rapidly and will play an important role in the future of digital pathology. Due to the image size, the computational cost of processing whole slide images (WSIs) in full resolution is immense. Moreover, image analysis requires well focused images in high magnification.METHODS:We present a system that merges virtual microscopy techniques, open source image analysis software, and distributed parallel processing. We have integrated the parallel processing framework JPPF, so batch processing can be performed distributed and in parallel. All resulting meta data and image data are collected and merged. As an example the system is applied to the specific task of image sharpness assessment. ImageJ is an open source image editing and processing framework developed at the NIH having a large user community that contributes image processing algorithms wrapped as plug-ins in a wide field of life science applications. We developed an ImageJ plug-in that supports both basic interactive virtual microscope and batch processing functionality. For the application of sharpness inspection we employ an approach with non-overlapping tiles. Compute nodes retrieve image tiles of moderate size from the streaming server and compute the focus measure. Each tile is divided into small sub images to calculate an edge based sharpness criterion which is used for classification. The results are aggregated in a sharpness map.RESULTS:Based on the system we calculate a sharpness measure and classify virtual slides into one of the following categories - excellent, okay, review and defective. 
Generating a scaled sharpness map enables the user to evaluate sharpness of WSIs and shows overall quality at a glance thus reducing tedious assessment work.CONCLUSIONS:Using sharpness assessment as an example, the introduced system can be used to process, analyze and parallelize analysis of whole slide images based on open source software.}, issn = {1746-1596}, doi = {10.1186/1746-1596-6-S1-S16}, url = {http://www.diagnosticpathology.org/content/6/S1/S16}, author = {Zerbe, Norman and Hufnagl, Peter and Schluns, Karsten} } @article {589, title = {FieldTrip: Open source software for advanced analysis of MEG, EEG, and invasive electrophysiological data.}, journal = {Computational intelligence and neuroscience}, volume = {2011}, year = {2011}, month = {2011}, pages = {156869}, abstract = {This paper describes FieldTrip, an open source software package that we developed for the analysis of MEG, EEG, and other electrophysiological data. The software is implemented as a MATLAB toolbox and includes a complete set of consistent and user-friendly high-level functions that allow experimental neuroscientists to analyze experimental data. It includes algorithms for simple and advanced analysis, such as time-frequency analysis using multitapers, source reconstruction using dipoles, distributed sources and beamformers, connectivity analysis, and nonparametric statistical permutation tests at the channel and source level. The implementation as toolbox allows the user to perform elaborate and structured analyses of large data sets using the MATLAB command line and batch scripting. Furthermore, users and developers can easily extend the functionality and implement new algorithms. The modular design facilitates the reuse in other software packages.}, keywords = {Electroencephalography, Electrophysiological Phenomena, Humans, Magnetoencephalography, Numerical Analysis, Computer-Assisted, Software, User-Computer Interface}, issn = {1687-5273}, author = {Oostenveld, Robert and Fries, Pascal and Maris, Eric and Schoffelen, Jan-Mathijs} } @article {642, title = {Implementation of an open source provider organization registry service.}, journal = {Studies in health technology and informatics}, volume = {169}, year = {2011}, month = {2011}, pages = {265-9}, abstract = {Healthcare Information Exchange Networks (HIEN) enables the exchange of medical information between different institutions. One of the biggest problems running a HIEN is the unique identification of the care providers. The provider and organisation registry service (PORS) has to provide a unique identifier for care providers. The concept and the implementation of PORS will be described in this article. Finally the PORS implementation will be compared with the Integrating the Healthcare Enterprise (IHE) profile for a Healthcare Provider Directory (HPD).}, issn = {0926-9630}, author = {Birkle, Markus and Schneider, Benjamin and Beck, Tobias and Deuster, Thomas and Fischer, Markus and Flatow, Florian and Heinrich, Robert and Kapp, Christian and Riemer, Jasmin and Simon, Michael and Bergh, Bj{\"o}rn} } @article {594, title = {Implementation of the Zambia electronic perinatal record system for comprehensive prenatal and delivery care.}, journal = {International journal of gynaecology and obstetrics: the official organ of the International Federation of Gynaecology and Obstetrics}, volume = {113}, year = {2011}, month = {2011 May}, pages = {131-6}, abstract = {OBJECTIVE: To characterize prenatal and delivery care in an urban African setting. 
METHODS: The Zambia Electronic Perinatal Record System (ZEPRS) was implemented to record demographic characteristics, past medical and obstetric history, prenatal care, and delivery and newborn care for pregnant women across 25 facilities in the Lusaka public health sector. RESULTS: From June 1, 2007, to January 31, 2010, 115552 pregnant women had prenatal and delivery information recorded in ZEPRS. Median gestation age at first prenatal visit was 23weeks (interquartile range [IQR] 19-26). Syphilis screening was documented in 95663 (83\%) pregnancies: 2449 (2.6\%) women tested positive, of whom 1589 (64.9\%) were treated appropriately. 111108 (96\%) women agreed to HIV testing, of whom 22\% were diagnosed with HIV. Overall, 112813 (98\%) of recorded pregnancies resulted in a live birth, and 2739 (2\%) in a stillbirth. The median gestational age was 38weeks (IQR 35-40) at delivery; the median birth weight of newborns was 3000g (IQR 2700-3300g). CONCLUSION: The results demonstrate the feasibility of using a comprehensive electronic medical record in an urban African setting, and highlight its important role in ongoing efforts to improve clinical care.}, issn = {1879-3479}, author = {Chi, Benjamin H and Vwalika, Bellington and Killam, William P and Wamalume, Chibesa and Giganti, Mark J and Mbewe, Reuben and Stringer, Elizabeth M and Chintu, Namwinga T and Putta, Nande B and Liu, Katherine C and Chibwesha, Carla J and Rouse, Dwight J and Stringer, Jeffrey S A} } @conference {MEJIA:2011:INRIA-00567598:1, title = {Invasive composition for the evolution of a health information system}, booktitle = {Variability \& Composition (VariComp)}, year = {2011}, note = {{ACM} 978-1-4503-0646-1/11/03 {D}.: {S}oftware/{D}.3: {PROGRAMMING} {LANGUAGES}/{D}.3.3: {L}anguage {C}onstructs and {F}eatures}, month = {03}, address = {Pernambuco Brazil}, abstract = {{I}n this paper we show that some of the evolution tasks in {O}pen{MRS}, a health information system, may require the invasive modification of interfaces and implementations in order to offer an appropriate modularization. {W}e introduce a new composition framework in {J}ava that supports the definition of expressive pattern-based invasive compositions. {F}ur thermore, we show that the composition framework allows us to concisely define an evolution scenario of {O}pen{MRS} that supports the consolidation of patient data from differ- ent remote instances.}, keywords = {Aspect-oriented programming, Distributed systems, Health information systems, Invasive software composition}, url = {http://hal.inria.fr/inria-00567598/PDF/mejia-sudholt-benavides_varicomp-AOSD2011.pdf}, author = {Mejia, Ismael and S{\"u}dholt, Mario and Benavides Navarro, Luis Daniel} } @inbook {springerlink:10.1007/978-1-4419-8204-9_9, title = {Medical Image Registration}, booktitle = {Multi Modality State-of-the-Art Medical Image Segmentation and Registration Methodologies}, year = {2011}, note = {10.1007/978-1-4419-8204-9_9}, pages = {227-245}, publisher = {Springer New York}, organization = {Springer New York}, abstract = {In this chapter, we cover the necessary background information required to understand medical image registration, the basic tools required to implement registration algorithms, and demonstrate a complete application for various types of registration between different modalities using freely available and maintained software.}, isbn = {978-1-4419-8204-9}, url = {http://dx.doi.org/10.1007/978-1-4419-8204-9_9}, author = {Aladl, Usaf E. 
and Peters, Terry}, editor = {El-Baz, Ayman S. and Acharya U, Rajendra and Laine, Andrew F. and Suri, Jasjit S.} } @article {591, title = {MITK-ToF-Range data within MITK.}, journal = {International journal of computer assisted radiology and surgery}, year = {2011}, month = {2011 May 31}, abstract = {PURPOSE: The time-of-flight (ToF) technique is an emerging technique for rapidly acquiring distance information and is becoming increasingly popular for intra-operative surface acquisition. Using the ToF technique as an intra-operative imaging modality requires seamless integration into the clinical workflow. We thus aim to integrate ToF support in an existing framework for medical image processing. METHODS: MITK-ToF was implemented as an extension of the open-source C++ Medical Imaging Interaction Toolkit (MITK) and provides the basic functionality needed for rapid prototyping and development of image-guided therapy (IGT) applications that utilize range data for intra-operative surface acquisition. This framework was designed with a module-based architecture separating the hardware-dependent image acquisition task from the processing of the range data. RESULTS: The first version of MITK-ToF has been released as an open-source toolkit and supports several ToF cameras and basic processing algorithms. The toolkit, a sample application, and a tutorial are available from http://mitk.org . CONCLUSIONS: With the increased popularity of time-of-flight cameras for intra-operative surface acquisition, integration of range data supports into medical image processing toolkits such as MITK is a necessary step. Handling acquisition of range data from different cameras and processing of the data requires the establishment and use of software design principles that emphasize flexibility, extendibility, robustness, performance, and portability. The open-source toolkit MITK-ToF satisfies these requirements for the image-guided therapy community and was already used in several research projects.}, issn = {1861-6429}, author = {Seitel, Alexander and Yung, Kwong and Mersmann, Sven and Kilgus, Thomas and Groch, Anja and Dos Santos, Thiago R and Franz, Alfred M and Nolden, Marco and Meinzer, Hans-Peter and Maier-Hein, Lena} } @article {1038, title = {An online method for diagnosis of difficult TB cases for developing countries.}, journal = {Stud Health Technol Inform}, volume = {164}, year = {2011}, month = {2011}, pages = {168-73}, abstract = {

Optimal use of limited human, technical and financial resources is a major concern for tuberculosis (TB) control in developing nations. Further impediments include a lack of trained physicians, and logistical difficulties in arranging face-to-face (f-2-f) TB Diagnostic Committee (TBDC) consultations. Use of e-Health for virtual TBDCs (Internet and "iPath"), to address such issues is being studied in the Philippines and Pakistan. In Pakistan, radiological diagnosis of 88 sputum smear negative but suspected TB patients has been compared with the {\textquoteright}gold standards{\textquoteright} (TB culture, and 2-month clinical follow up). Of 88 diagnostic decisions made by primary physicians at the spoke site and electronic TBDC (e-TBDC) at hub site, there was agreement in 71 cases and disagreement on 17 cases. The turn-around time (TAT; patient registration at spoke site for f-2-f diagnosis to receiving the electronic diagnosis), averaged 34.6 hours; ranging 9 minutes to 289.2 hours. Average TAT at the rural site (59.15 hours) was more than the urban site (15.9 hours). Comparison of e-TBDC and f-2-f diagnosis with the gold standards showed only slight differences. Using culture as the gold standard, e-TBDC decisions showed greater accuracy (sensitivity - 32.4\%) as compared to f-2-f (27.6\%); using 2-month clinical follow-up as the gold standard, f-2-f diagnosis showed slightly better improvement in patient symptoms and weight as compared to e-TBDC. In Philippines "iPath" was trialed and demonstrated that e-TBDCs have potential. Such groups could review cases, diagnose, and write comments remotely, reducing the diagnosis and treatment delay compared to usual care.
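As an editorial note on the sensitivity figures quoted above, the sketch below shows how sensitivity and specificity against a gold standard (such as TB culture) are computed from paired decisions; the counts are invented for illustration and are not study data.

```python
# Minimal sketch: sensitivity and specificity of diagnostic decisions
# against a gold standard. The boolean vectors below are invented.

def sensitivity_specificity(decisions, gold):
    tp = sum(d and g for d, g in zip(decisions, gold))
    fn = sum((not d) and g for d, g in zip(decisions, gold))
    tn = sum((not d) and (not g) for d, g in zip(decisions, gold))
    fp = sum(d and (not g) for d, g in zip(decisions, gold))
    return tp / (tp + fn), tn / (tn + fp)

if __name__ == "__main__":
    # True = "TB", False = "not TB"; illustrative values only.
    e_tbdc =  [True, False, True, False, False, True, False, False]
    culture = [True, True,  True, False, True,  False, False, False]
    sens, spec = sensitivity_specificity(e_tbdc, culture)
    print(f"sensitivity={sens:.2f} specificity={spec:.2f}")
```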

}, keywords = {Adult, Developing Countries, Diagnostic Techniques and Procedures, Female, Humans, Internet, Male, Middle Aged, Mycobacterium Infections, Nontuberculous, Pakistan, Philippines, Telemedicine, Tuberculosis, Pulmonary}, issn = {0926-9630}, author = {Marcelo, Alvin and Fatmi, Zafar and Firaza, Paul Nimrod and Shaikh, Shiraz and Dandan, Alvin Joseph and Irfan, Muhammad and Bari, Vaqar and Scott, Richard E} } @article {720, title = {OpenMRS, A Global Medical Records System Collaborative: Factors Influencing Successful Implementation.}, journal = {AMIA ... Annual Symposium proceedings / AMIA Symposium. AMIA Symposium}, volume = {2011}, year = {2011}, month = {2011}, pages = {960-968}, abstract = {OpenMRS is an open-source, robust electronic health record (EHR) platform that is supported by a large global network and used in over forty countries. We explored what factors lead to successful implementation of OpenMRS in resource constrained settings. Data sources included in-person and telephone key informant interviews, focus groups and responses to an electronic survey from 10 sites in 7 countries. Qualitative data was coded through independent coding, discussion and consensus. The most common perceived benefits of implementation were for providing clinical care, reporting to funders, managing operations and research. Successful implementation factors include securing adequate infrastructure, and sociotechnical system factors, particularly adequate staffing, computers, and ability to use software. Strategic and tactical planning were successful strategies, including understanding and addressing the infrastructure and human costs involved, training or hiring personnel technically capable of modifying the software and integrating it into the daily work flow to meet clinicians{\textquoteright} needs.}, issn = {1942-597X}, author = {Mohammed-Rajput, Nareesa A and Smith, Dawn C and Mamlin, Burke and Biondich, Paul and Doebbeling, Brad N} } @article {10.1109/ESEM.2011.11, title = {A Qualitative Study of Open Source Software Development: The Open EMR Project}, journal = {Empirical Software Engineering and Measurement, International Symposium on}, year = {2011}, pages = {30-39}, publisher = {IEEE Computer Society}, address = {Los Alamitos, CA, USA}, abstract = {Open Source software is competing successfully in many areas. The commercial sector is recognizing the benefits offered by Open Source development methods that lead to high quality software. Can these benefits be realized in specialized domains where expertise is rare? This study examined discussion forums of an Open Source project in a particular specialized application domain -- electronic medical records -- to see how development roles are carried out, and by whom. We found through a qualitative analysis that the core developers in this system include doctors and clinicians who also use the product. We also found that the size of the community associated with the project is an order of magnitude smaller than predicted, yet still maintains a high degree of responsiveness to issues raised by users. 
The implication is that a few experts and a small core of dedicated programmers can achieve success using an Open Source approach in a specialized domain.}, isbn = {978-0-7695-4604-9}, doi = {http://doi.ieeecomputersociety.org/10.1109/ESEM.2011.11}, author = {John Noll and Sarah Beecham and Dominik Seichter} } @article {1155, title = {The {SHARPn} project on secondary use of {Electronic} {Medical} {Record} data: progress, plans, and possibilities}, journal = {AMIA ... Annual Symposium proceedings. AMIA Symposium}, volume = {2011}, year = {2011}, pages = {248{\textendash}256}, abstract = {SHARPn is a collaboration among 16 academic and industry partners committed to the production and distribution of high-quality software artifacts that support the secondary use of EMR data. Areas of emphasis are data normalization, natural language processing, high-throughput phenotyping, and data quality metrics. Our work avails the industrial scalability afforded by the Unstructured Information Management Architecture (UIMA) from IBM Watson Research labs, the same framework which underpins the Watson Jeopardy demonstration. This descriptive paper outlines our present work and achievements, and presages our trajectory for the remainder of the funding period. The project is one of the four Strategic Health IT Advanced Research Projects (SHARP) projects funded by the Office of the National Coordinator in 2010.}, keywords = {Algorithms, Biomedical Research, Cooperative Behavior, Data Mining, electronic health records, Natural Language Processing, Software}, issn = {1942-597X}, author = {Chute, Christopher G. and Pathak, Jyotishman and Savova, Guergana K. and Bailey, Kent R. and Schor, Marshall I. and Hart, Lacey A. and Beebe, Calvin E. and Huff, Stanley M.} } @article {640, title = {Towards Open Collaborative Health Informatics - The Role of Free/Libre Open Source Principles. Contribution of the IMIA Open Source Health Informatics Working Group.}, journal = {Yearbook of medical informatics}, volume = {6}, year = {2011}, month = {2011}, pages = {63-72}, abstract = {OBJECTIVES: : To analyze the contribution of Free/Libre Open Source Software in health care (FLOSS-HC) and to give perspectives for future developments. METHODS: The paper summarizes FLOSS-related trends in health care as anticipated by members of the IMIA Open Source Working Group. Data were obtained through literature review and personal experience and observations of the authors in the last two decades. A status quo is given by a frequency analysis of the database of Medfloss.org, one of the world{\textquoteright}s largest platforms dedicated to FLOSS-HC. The authors discuss current problems in the field of health care and finally give a prospective roadmap, a projection of the potential influences of FLOSS in health care. RESULTS: FLOSS-HC already exists for more than 2 decades. Several projects have shown that FLOSS may produce highly competitive alternatives to proprietary solutions that are at least equivalent in usability and have a better total cost of ownership ratio. The Medfloss.org database currently lists 221 projects of diverse application types. CONCLUSIONS: FLOSS principles hold a great potential for addressing several of the most critical problems in health care IT. The authors argue that an ecosystem perspective is relevant and that FLOSS principles are best suited to create health IT systems that are able to evolve over time as medical knowledge, technologies, insights, workflows etc. continuously change. 
All these factors that inherently influence the development of health IT systems are changing at an ever growing pace. Traditional models of software engineering are not able to follow these changes and provide up-to-date systems for an acceptable cost/value ratio. To allow FLOSS to positively influence Health IT in the future a "FLOSS-friendly" environment has to be provided. Policy makers should resolve uncertainties in the legal framework that disfavor FLOSS. Certification procedures should be specified in a way that they do not raise additional barriers for FLOSS.}, issn = {0943-4747}, author = {Karopka, T and Schmuhl, H and Marcelo, A and Molin, J Dal and Wright, G} } @article {536, title = {Tracking and monitoring the health workforce: a new human resources information system (HRIS) in Uganda.}, journal = {Human resources for health}, volume = {9}, year = {2011}, month = {2011 Feb 17}, pages = {6}, abstract = {ABSTRACT: BACKGROUND: Health workforce planning is important in ensuring that the recruitment, training and deployment of health workers are conducted in the most efficient way possible. However, in many developing countries, human resources for health data are limited, inconsistent, out-dated, or unavailable. Consequently, policy-makers are unable to use reliable data to make informed decisions about the health workforce. Computerized human resources information systems (HRIS) enable countries to collect, maintain, and analyze health workforce data. METHODS: The purpose of this article is twofold. First, we describe Uganda{\textquoteright}s transition from a paper filing system to an electronic HRIS capable of providing information about country-specific health workforce questions. We examine the ongoing five-step HRIS strengthening process used to implement an HRIS that tracks health worker data at the Uganda Nurses and Midwives Council (UNMC). Secondly, we describe how HRIS data can be used to address workforce planning questions via an initial analysis of the UNMC training, licensure and registration records from 1970 through May 2009. RESULTS: The data indicate that, for the 25 482 nurses and midwives who entered training before 2006, 72\% graduated, 66\% obtained a council registration, and 28\% obtained a license to practice. Of the 17 405 nurses and midwives who obtained a council registration as of May 2009, 96\% are of Ugandan nationality and just 3\% received their training outside of the country. Thirteen per cent obtained a registration for more than one type of training. Most (34\%) trainings with a council registration are for the enrolled nurse training, followed by enrolled midwife (25\%), registered (more advanced) nurse (21\%), registered midwife (11\%), and more specialized trainings (9\%). CONCLUSION: The UNMC database is valuable in monitoring and reviewing information about nurses and midwives. However, information obtained from this system is also important in improving strategic planning for the greater health care system in Uganda. 
We hope that the use of a real-world example of HRIS strengthening provides guidance for the implementation of similar projects in other countries or contexts.}, issn = {1478-4491}, author = {Spero, Julie C and McQuide, Pamela A and Matte, Rita} } @article {606, title = {Use of an innovative, affordable, and open-source short message service-based tool to monitor malaria in remote areas of Uganda.}, journal = {The American journal of tropical medicine and hygiene}, volume = {85}, year = {2011}, month = {2011 Jul}, pages = {26-33}, abstract = {Abstract. Quality health management requires timely and accurate data, and paper-based reporting does not fill this role adequately. The introduction of malaria rapid diagnostic tests and the availability of wireless communications present an opportunity to open direct data transmission and feedback between peripheral health workers and central managers. In November 2009, the Uganda Ministry of Health deployed a short message service-based reporting system in two districts. At a set-up cost of $100/health facility, local technician support of $ 400 per month, and a cost of $0.53/week/clinic, the SMS reporting system was started at more than 140 clinics. Positivity rates for rapid diagnostic tests and artemisinin combination therapy stock outs were 48\% and 54\% in Kabale and 71\% and 54\% in Gulu, among other reports, at more than 85\% health facilities reporting weekly and without monetary incentives or additional supervision. The SMS-based reporting systems have potential to improve timeliness in reporting of specific, time-sensitive metrics at modest cost, while by-passing current bottlenecks in the flow of data. With the development of specific capacity to manage stock data at district level, the availability of timely data offers potential to address commodity distribution problems and reduce stock-outs.}, issn = {1476-1645}, author = {Asiimwe, Caroline and Gelvin, David and Lee, Evan and Ben Amor, Yanis and Quinto, Ebony and Katureebe, Charles and Sundaram, Lakshmi and Bell, David and Berg, Matt} } @article {599, title = {Wikipedia: a key tool for global public health promotion.}, journal = {Journal of medical Internet research}, volume = {13}, year = {2011}, month = {2011}, pages = {e14}, abstract = {The Internet has become an important health information resource for patients and the general public. Wikipedia, a collaboratively written Web-based encyclopedia, has become the dominant online reference work. It is usually among the top results of search engine queries, including when medical information is sought. Since April 2004, editors have formed a group called WikiProject Medicine to coordinate and discuss the English-language Wikipedia{\textquoteright}s medical content. This paper, written by members of the WikiProject Medicine, discusses the intricacies, strengths, and weaknesses of Wikipedia as a source of health information and compares it with other medical wikis. Medical professionals, their societies, patient groups, and institutions can help improve Wikipedia{\textquoteright}s health-related entries. Several examples of partnerships already show that there is enthusiasm to strengthen Wikipedia{\textquoteright}s biomedical content. Given its unique global reach, we believe its possibilities for use as a tool for worldwide health promotion are underestimated. 
We invite the medical community to join in editing Wikipedia, with the goal of providing people with free access to reliable, understandable, and up-to-date health information.}, keywords = {Consumer Health Information, Encyclopedias as Topic, Health Promotion, Humans, Information Dissemination, Information Services, Internet, Patient Education as Topic, Public Health, World Health}, issn = {1438-8871}, author = {Heilman, James M and Kemmann, Eckhard and Bonert, Michael and Chatterjee, Anwesh and Ragar, Brent and Beards, Graham M and Iberri, David J and Harvey, Matthew and Thomas, Brendan and Stomp, Wouter and Martone, Michael F and Lodge, Daniel J and Vondracek, Andrea and de Wolff, Jacob F and Liber, Casimir and Grover, Samir C and Vickers, Tim J and Mesk{\'o}, Bertalan and Laurent, Micha{\"e}l R} } @article {849, title = {The Yale cTAKES extensions for document classification: architecture and application.}, journal = {J Am Med Inform Assoc}, volume = {18}, year = {2011}, month = {2011 Sep-Oct}, pages = {614-20}, abstract = {

BACKGROUND: Open-source clinical natural-language-processing (NLP) systems have lowered the barrier to the development of effective clinical document classification systems. Clinical natural-language-processing systems annotate the syntax and semantics of clinical text; however, feature extraction and representation for document classification pose technical challenges.

METHODS: The authors developed extensions to the clinical Text Analysis and Knowledge Extraction System (cTAKES) that simplify feature extraction, experimentation with various feature representations, and the development of both rule and machine-learning based document classifiers. The authors describe and evaluate their system, the Yale cTAKES Extensions (YTEX), on the classification of radiology reports that contain findings suggestive of hepatic decompensation.

RESULTS AND DISCUSSION: The F1-score of the system for the retrieval of abdominal radiology reports was 96\%, and was 79\%, 91\%, and 95\% for the presence of liver masses, ascites, and varices, respectively. The authors released YTEX as open source, available at http://code.google.com/p/ytex.
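To illustrate the kind of feature representation such extensions aim to simplify, the sketch below applies a toy rule-based classifier to negation-aware concept annotations of a report. It is not the YTEX or cTAKES API; the annotation structure, concept names, and rule are invented for illustration.

```python
# Illustrative sketch only (not the YTEX/cTAKES API): a toy rule-based
# document classifier over negation-aware concept annotations.
from dataclasses import dataclass

@dataclass
class Annotation:
    concept: str      # e.g. a UMLS-style concept name (hypothetical)
    negated: bool     # whether the NLP pipeline marked it as negated

TARGET_CONCEPTS = {"ascites", "varices", "liver_mass"}  # invented feature set

def has_decompensation_finding(annotations):
    """Rule: at least one non-negated target concept in the report."""
    return any(a.concept in TARGET_CONCEPTS and not a.negated for a in annotations)

if __name__ == "__main__":
    report = [Annotation("ascites", negated=True),
              Annotation("varices", negated=False),
              Annotation("splenomegaly", negated=False)]
    print(has_decompensation_finding(report))  # True, because of "varices"
```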

}, keywords = {Connecticut, Data Mining, Decision Support Systems, Clinical, electronic health records, Humans, Liver Failure, Natural Language Processing, Pattern Recognition, Automated, Radiology Information Systems}, issn = {1527-974X}, doi = {10.1136/amiajnl-2011-000093}, author = {Garla, Vijay and Lo Re, Vincent and Dorey-Stein, Zachariah and Kidwai, Farah and Scotch, Matthew and Womack, Julie and Justice, Amy and Brandt, Cynthia} } @article {610, title = {Adaptive radiotherapy based on contrast enhanced cone beam CT imaging.}, journal = {Acta oncologica (Stockholm, Sweden)}, volume = {49}, year = {2010}, month = {2010 Oct}, pages = {972-7}, abstract = {Cone beam CT (CBCT) imaging has become an integral part of radiation therapy, with images typically used for offline or online patient setup corrections based on bony anatomy co-registration. Ideally, the co-registration should be based on tumor localization. However, soft tissue contrast in CBCT images may be limited. In the present work, contrast enhanced CBCT (CECBCT) images were used for tumor visualization and treatment adaptation. Material and methods. A spontaneous canine maxillary tumor was subjected to repeated cone beam CT imaging during fractionated radiotherapy (10 fractions in total). At five of the treatment fractions, CECBCT images, employing an iodinated contrast agent, were acquired, as well as pre-contrast CBCT images. The tumor was clearly visible in post-contrast minus pre-contrast subtraction images, and these contrast images were used to delineate gross tumor volumes. IMRT dose plans were subsequently generated. Four different strategies were explored: 1) fully adapted planning based on each CECBCT image series, 2) planning based on images acquired at the first treatment fraction and patient repositioning following bony anatomy co-registration, 3) as for 2), but with patient repositioning based on co-registering contrast images, and 4) a strategy with no patient repositioning or treatment adaptation. The equivalent uniform dose (EUD) and tumor control probability (TCP) calculations to estimate treatment outcome for each strategy. Results. Similar translation vectors were found when bony anatomy and contrast enhancement co-registration were compared. Strategy 1 gave EUDs closest to the prescription dose and the highest TCP. Strategies 2 and 3 gave EUDs and TCPs close to that of strategy 1, with strategy 3 being slightly better than strategy 2. Even greater benefits from strategies 1 and 3 are expected with increasing tumor movement or deformation during treatment. The non-adaptive strategy 4 was clearly inferior to all three adaptive strategies. Conclusion. CECBCT may prove useful for adaptive radiotherapy.}, keywords = {Animals, Carcinoma, Cone-Beam Computed Tomography, Contrast Media, Dog Diseases, Dogs, Female, Maxillary Neoplasms, Patient Positioning, Radiographic Image Enhancement, Radiotherapy Planning, Computer-Assisted}, issn = {1651-226X}, author = {S{\o}vik, Aste and R{\o}dal, Jan and Skogmo, Hege K and Lerv{\r a}g, Christoffer and Eilertsen, Karsten and Malinen, Eirik} } @article {871, title = {The caBIG annotation and image Markup project.}, journal = {J Digit Imaging}, volume = {23}, year = {2010}, month = {2010 Apr}, pages = {217-25}, abstract = {

Image annotation and markup are at the core of medical interpretation in both the clinical and the research setting. Digital medical images are managed with the DICOM standard format. While DICOM contains a large amount of meta-data about whom, where, and how the image was acquired, DICOM says little about the content or meaning of the pixel data. An image annotation is the explanatory or descriptive information about the pixel data of an image that is generated by a human or machine observer. An image markup is the set of graphical symbols placed over the image to depict an annotation. While DICOM is the standard for medical image acquisition, manipulation, transmission, storage, and display, there are no standards for image annotation and markup. Many systems expect annotation to be reported verbally, while markups are stored in graphical overlays or proprietary formats. This makes it difficult to extract and compute with both of them. The goal of the Annotation and Image Markup (AIM) project is to develop a mechanism for modeling, capturing, and serializing image annotation and markup data that can be adopted as a standard by the medical imaging community. The AIM project produces both human- and machine-readable artifacts. This paper describes the AIM information model, schemas, software libraries, and tools so as to prepare researchers and developers for their use of AIM.
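As a toy editorial sketch of the general idea described above (keeping the annotation, its markup, and their serialization as structured data rather than burned-in text or verbal reports), the example below serializes an annotation with an attached graphical markup. It is not the AIM information model or schema; all field names and values are invented.

```python
# Toy sketch only, NOT the AIM model or schema: separate an annotation
# (meaning) from its markup (graphical overlay) and serialize both.
import json
from dataclasses import dataclass, asdict, field

@dataclass
class Markup:
    shape: str                      # e.g. "circle", "polyline"
    points: list                    # pixel coordinates on the image

@dataclass
class Annotation:
    image_uid: str                  # identifier of the annotated image
    finding: str                    # descriptive information from an observer
    observer: str                   # who or what produced the annotation
    markups: list = field(default_factory=list)

if __name__ == "__main__":
    ann = Annotation(
        image_uid="1.2.840.example.1",          # hypothetical identifier
        finding="spiculated mass, upper outer quadrant",
        observer="reader_01",
        markups=[Markup("circle", [[120, 88], [141, 88]])],
    )
    print(json.dumps(asdict(ann), indent=2))    # machine-readable serialization
```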

}, keywords = {Computational Biology, Computer Communication Networks, Databases, Factual, Diagnostic Imaging, Humans, Interdisciplinary Communication, Medical Records Systems, Computerized, National Cancer Institute (U.S.), National Institutes of Health (U.S.), Neoplasms, Program Evaluation, Quality of Health Care, Radiographic Image Enhancement, Radiology Information Systems, Software, Systems Integration, United States, User-Computer Interface}, issn = {1618-727X}, doi = {10.1007/s10278-009-9193-9}, author = {Channin, David S and Mongkolwat, Pattanasak and Kleper, Vladimir and Sepukar, Kastubh and Rubin, Daniel L} } @article {20226035, title = {CASE: a framework for computer supported outbreak detection.}, journal = {BMC medical informatics and decision making}, volume = {10}, year = {2010}, month = {2010}, pages = {14}, abstract = {BACKGROUND: In computer supported outbreak detection, a statistical method is applied to a collection of cases to detect any excess cases for a particular disease. Whether a detected aberration is a true outbreak is decided by a human expert. We present a technical framework designed and implemented at the Swedish Institute for Infectious Disease Control for computer supported outbreak detection, where a database of case reports for a large number of infectious diseases can be processed using one or more statistical methods selected by the user. RESULTS: Based on case information, such as diagnosis and date, different statistical algorithms for detecting outbreaks can be applied, both on the disease level and the subtype level. The parameter settings for the algorithms can be configured independently for different diagnoses using the provided graphical interface. Input generators and output parsers are also provided for all supported algorithms. If an outbreak signal is detected, an email notification is sent to the persons listed as receivers for that particular disease. CONCLUSIONS: The framework is available as open source software, licensed under GNU General Public License Version 3. By making the code open source, we wish to encourage others to contribute to the future development of computer supported outbreak detection systems, and in particular to the development of the CASE framework.}, author = {Cakici, Baki and Hebing, Kenneth and Gr{\"u}newald, Maria and Saretok, Paul and Hulth, Anette} } @proceedings {513, title = {Comprehensive Yet Scalable Health Information Systems for Low Resource Settings: A Collaborative Effort in Sierra Leone}, year = {2010}, month = {11/2010}, abstract = {We address the problem of how to integrate health information systems in low-income African countries in which technical infrastructure and human resources vary wildly within countries. We describe a set of tools to meet the needs of different service areas including managing aggregate indicators, patient level record systems, and mobile tools for community outreach. We present the case of Sierra Leone and use this case to motivate and illustrate an architecture that allows us to provide services at each level of the health system (national, regional, facility and community) and provide different configurations of the tools as appropriate for the individual area. Finally, we present a collaborative implementation of this approach in Sierra Leone.}, url = {http://proceedings.amia.org/127frh}, author = {Braaa, J{\o}rn and Kanterb, Andrew S. 
and Leshc, Neal and Crichtond, Ryan and Jolliffea, Bob and S{\ae}b{\o}e, Johan and Kossia, Edem and Seebregts, Christopher J} } @article {20118730, title = {Computer-aided volumetric comparison of reconstructed orbits for blow-out fractures with nonpreformed versus 3-dimensionally preformed titanium mesh plates: a preliminary study.}, journal = {Journal of computer assisted tomography}, volume = {34}, year = {2010}, month = {2010 Jan}, pages = {98-104}, abstract = {OBJECTIVES: To compare and evaluate, using computer-aided volumetric measurements, the accuracy and reliability of nonpreformed mesh plates (NPMPs) versus 3-dimensionally preformed titanium mesh plates (PMPs) in posttraumatic orbital volume restoration. PATIENTS AND METHODS: Facial coronal computed tomographic scan slices from 20 patients (10 in the NPMP and 10 in the PMP group) were used to measure bony orbital volume using OsiriX Medical Image software (version 3.3.2, www.osirix-viewer.com). The procedure was performed on both orbits; thereafter, the volume of the contralateral health orbit was used as a control for comparison in the 2 groups of patients. The difference in orbital volume between the 2 groups and between the reconstructed versus uninjured side in both groups have been statistically correlated. RESULTS: The mean orbital volume between the reconstructed (19.215 mL in NPMP and 21.791 mL in PMP) and the contralateral uninjured side (18.955 mL in NPMP and 21.710 mL in PMP) was not statistically significant (P > 0.05). The mean orbital volumes of the reconstructed orbits were 19.215 mL in the NPMP and 21.791 mL in the PMP group, with no statistically significant difference (P > 0.05). The volume data of the reconstructed orbit fitted that of the contralateral uninjured orbit with an accuracy of a maximum of 1.85 mL in the NPMP group and 2.5 mL in the PMP group. CONCLUSIONS: The current study demonstrated that there were no significant differences in the orbital volume restoration using either technique. In fact, both techniques allow for close reproduction of natural orbital volume and shape, and its use in posttraumatic orbital wall reconstruction was successful.}, author = {Scolozzi, Paolo and Momjian, Armen and Heuberger, Joris} } @article {20335647, title = {The development and design of an electronic patient record using open source web-based technology.}, journal = {The HIM journal}, volume = {39}, year = {2010}, month = {2010}, pages = {30-5}, abstract = {This paper describes the method used to develop the One Stop Crisis Centre (OSCC) Portal, an open source web-based electronic patient record system (EPR) for the One Stop Crisis Center, Hospital Universiti Sains Malaysia (HUSM) in Kelantan, Malaysia. Features and functionalities of the system are presented to demonstrate the workflow. Use of the OSCC Portal improved data integration and data communication and contributed to improvements in care management. With implementation of the OSCC portal, improved coordination between disciplines and standardisation of data in HUSM were noticed. It is expected that this will in turn result in improved data confidentiality and data integrity. The collected data will also be useful for quality assessment and research. 
Other low-resource centers with limited computer hardware and access to open-source software could benefit from this endeavour.}, author = {Syed-Mohamad, Sharifa Mastura and Ali, Siti Hawa and Mat-Husin, Mohd Nazri} } @article {20517664, title = {E-health integration and interoperability based on open-source information technology.}, journal = {Wiener klinische Wochenschrift}, volume = {122 Suppl 2}, year = {2010}, month = {2010 May}, pages = {3-10}, author = {Dinevski, Dejan and Poli, Andrea and Krajnc, Ivan and Sustersic, Olga and Arh, Tanja} } @article {904, title = {Electronic data capture for registries and clinical trials in orthopaedic surgery: Open source versus commercial systems}, journal = {Clinical Orthopaedics and Related Research}, volume = {468}, year = {2010}, pages = {2664{\textendash}2671}, abstract = {Background: Collection and analysis of clinical data can help orthopaedic surgeons to practice evidence based medicine. Spreadsheets and offline relational databases are prevalent, but not flexible, secure, workflow friendly and do not support the generation of standardized and interoperable data. Additionally these data collection applications usually do not follow a structured and planned approach which may result in failure to achieve the intended goal. Questions/purposes: Our purposes are (1) to provide a brief overview of EDC systems, their types, and related pros and cons as well as to describe commonly used EDC platforms and their features; and (2) describe simple steps involved in designing a registry/clinical study in DADOS P, an open source EDC system. Where are we now?: Electronic data capture systems aimed at addressing these issues are widely being adopted at an institutional/national/ international level but are lacking at an individual level. A wide array of features, relative pros and cons and different business models cause confusion and indecision among orthopaedic surgeons interested in implementing EDC systems. Where do we need to go?: To answer clinical questions and actively participate in clinical studies, orthopaedic surgeons should collect data in parallel to their clinical activities. Adopting a simple, user-friendly, and robust EDC system can facilitate the data collection process. How do we get there?: Conducting a balanced evaluation of available options and comparing them with intended goals and requirements can help orthopaedic surgeons to make an informed choice. {\textcopyright} 2010 The Association of Bone and Joint Surgeons{\textregistered}.}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-77957336660\&partnerID=40\&md5=dec3706be86215e9656a06f82265cb28}, author = {Shah, J.a b and Rajgor, D.a b and Pradhan, S.a b and McCready, M.c and Zaveri, A.a d and Pietrobon, R.c} } @article {586, title = {The extensible open-source rigid and affine image registration module of the Medical Imaging Interaction Toolkit (MITK).}, journal = {Computer methods and programs in biomedicine}, volume = {100}, year = {2010}, month = {2010 Oct}, pages = {79-86}, abstract = {Although non-rigid registration methods are available or under development for many specific problems in medicine, rigid and affine registration is an important task that is often performed for pre-aligning images before using non-rigid registration. In this paper, we present a free and open-source application for rigid and affine image registration, which is designed both for developers and for end-users. 
The application is based on the Medical Imaging Interaction Toolkit (MITK) and allows for inter-modality and intra-modality rigid 2D-2D and 3D-3D registration of medical images such as CT, MRI, or ultrasound. The framework as well as the application can be easily extended by adding new transforms, metrics and optimizers. Thus, developers of new algorithms are enabled to test and use their algorithms more quickly, spending less effort on user interfaces. Additionally, the framework provides the possibility to use image masks to restrict the evaluation of metric values by the optimizer to certain areas of the images.}, keywords = {Computers, Diagnostic Imaging, Image Enhancement, Image Interpretation, Computer-Assisted, Pattern Recognition, Automated, Software, User-Computer Interface}, issn = {1872-7565}, author = {Stein, D and Fritzsche, K H and Nolden, M and Meinzer, H P and Wolf, I} } @article {20974022, title = {Functionalities of free and open electronic health record systems.}, journal = {International journal of technology assessment in health care}, volume = {26}, year = {2010}, month = {2010 Oct}, pages = {382-9}, abstract = {Objectives: The aim of this study was to examine open-source electronic health record (EHR) software to determine their level of functionality according to the International Organization for Standardization (ISO) standards. Methods: ISO standards were used as a guideline to determine and describe the reference architecture and functionalities of a standard electronic health record system as well as the environmental context for which the software has been built. Twelve open-source EHR systems were selected and evaluated according to two-dimensional criteria based on ISO/TS 18308:2004 functional requirements and ISO/TR 20514:2005 context of the EHR system. Results: Open EHR software programs mostly fulfill structural, procedural, evolutional, and medicolegal requirements at the minimal and full functionality levels. Communication, privacy, and security requirements are accomplished in less than 23 percent of the cases, mainly at the minimal functional level. 
Ethical, cultural, and consumer requirements still need to be fulfilled by free and open-source EHR applications. Conclusions: Most analyzed systems had several functional limitations. Nevertheless, especially for clinicians and decision makers in developing countries, open-source EHR systems are an option. The limited functionalities are likely to become requirements for further releases of open-source EHR systems.}, author = {Flores Zuniga, Alejandro Enrique and Win, Khin Than and Susilo, Willy} } @article {20663165, title = {Heterogeneity prevails: the state of clinical trial data management in Europe - results of a survey of ECRIN centres.}, journal = {Trials}, volume = {11}, year = {2010}, month = {2010 Jul 21}, pages = {79}, abstract = {BACKGROUND: The use of Clinical Data Management Systems (CDMS) has become essential in clinical trials to handle the increasing amount of data that must be collected and analyzed. With a CDMS, trial data are captured at investigator sites with "electronic Case Report Forms". Although more and more of these electronic data management systems are used in academic research centres, an overview of CDMS products and of available data management and quality management resources for academic clinical trials in Europe is missing. METHODS: The ECRIN (European Clinical Research Infrastructure Network) data management working group conducted a two-part standardized survey on data management, software tools, and quality management for clinical trials. The questionnaires were answered by nearly 80 centres/units (with overall response rates of 47\% and 43\%) from 12 European countries and EORTC. RESULTS: Our survey shows that about 90\% of centres have a CDMS in routine use. Of these CDMS, nearly 50\% are commercial systems; Open Source solutions don{\textquoteright}t play a major role. In general, solutions used for clinical data management are very heterogeneous: 20 different commercial CDMS products (7 Open Source solutions) in addition to 17/18 proprietary systems are in use. The most widely employed CDMS products are MACRO TM and Capture System TM, followed by solutions that are used in at least 3 centres: eResearch Network TM, CleanWeb TM, GCP Base TM and SAS TM. Although quality management systems for data management are in place in most centres/units, there are some deficits in the area of system validation. CONCLUSIONS: Because the considerable heterogeneity of data management software solutions may be a hindrance to cooperation based on trial data exchange, standards like CDISC (Clinical Data Interchange Standards Consortium) should be implemented more widely. In a heterogeneous environment, the use of data standards can simplify data exchange, increase the quality of data and prepare centres for new developments (e.g. the use of EHR for clinical research). Because data management and the use of electronic data capture systems in clinical trials are characterized by the impact of regulations and guidelines, ethical concerns are discussed. In this context, quality management becomes an important part of compliant data management. 
To address these issues ECRIN will establish certified data centres to support electronic data management and associated compliance needs of clinical trial centres in Europe.}, url = {http://www.trialsjournal.com/content/11/1/79}, author = {Kuchinke, Wolfgang and Ohmann, Christian and Yang, Qin and Salas, Nader and Lauritsen, Jens and Gueyffier, Francois and Leizorovicz, Alan and Schade-Brittinger, Carmen and Wittenberg, Michael and Voko, Zoltan and Gaynor, Siobhan and Cooney, Margaret and Doran, Peter and Maggioni, Aldo and Lorimer, Andrea and Torres, Ferran and McPherson, Gladys and Charvill, Jim and Hellstrom, Mats and Lejeune, Stephane} } @article {1054, title = {How informatics can potentiate precompetitive open-source collaboration to jump-start drug discovery and development.}, journal = {Clin Pharmacol Ther}, volume = {87}, year = {2010}, month = {2010 May}, pages = {614-6}, keywords = {Animals, Cooperative Behavior, Drug Discovery, Drug Industry, Economic Competition, Humans, Informatics, Information Dissemination}, issn = {1532-6535}, doi = {10.1038/clpt.2010.21}, author = {Perakslis, E D and Van Dam, J and Szalma, S} } @article {1014, title = {Implementing OpenMRS for patient monitoring in an HIV/AIDS care and treatment program in rural Mozambique.}, journal = {Stud Health Technol Inform}, volume = {160}, year = {2010}, month = {2010}, pages = {411-5}, abstract = {

We have adopted the Open Medical Record System (OpenMRS) framework to implement an electronic patient monitoring system for an HIV care and treatment program in Mozambique. The program provides technical assistance to the Ministry of Health supporting the scale-up of integrated HIV care and support services in health facilities in rural, resource-limited settings. The implementation is in use for adult and pediatric programs, with ongoing roll-out to cover all supported sites. We describe early experiences in adapting the system to the program needs, addressing infrastructure challenges, creating a regional support team, training data entry staff, migrating a legacy database, deployment, and current use. We find that OpenMRS offers excellent prospects for in-country development of health information systems, even in severely resource-limited settings. However, it also requires considerable organizational infrastructure investment and technical capacity building to ensure continued local support.

}, keywords = {Database Management Systems, Delivery of Health Care, electronic health records, HIV Infections, Information Storage and Retrieval, Medical Record Linkage, Mozambique, Population Surveillance, Rural Health Services}, issn = {0926-9630}, author = {Manders, Eric-Jan and Jos{\'e}, Eurico and Solis, Manuel and Burlison, Janeen and Nhampossa, Jos{\'e} Leopoldo and Moon, Troy} } @proceedings {512, title = {Indivo X: Developing a Fully Substitutable Personally Controlled Health Record Platform}, year = {2010}, month = {11/2010}, abstract = {To support a rich ecosystem of third-party applications around a personally controlled health record (PCHR), we have redesigned Indivo, the original PCHR, as a web-based platform with feature-level substitutability. Core to this new release is the Indivo X Application Programming Interface (API), the contract between the PCHR platform and the end-user apps. Using rapid iterative development to build a minimal feature set from real-world requirements, the resulting Indivo X API, now in public stable beta, is enabling developers, including third-party contributors, to quickly create and integrate novel features into patients{\textquoteright} online records, ultimately building a fully customizable experience for diverse patient needs.}, url = {http://proceedings.amia.org/127eoo}, author = {Adida, Ben and Sanyal, Arjun and Zabak, Steve and Kohane, Isaac S. and Mandl, Kenneth D} } @article {847, title = {Mayo clinical Text Analysis and Knowledge Extraction System (cTAKES): architecture, component evaluation and applications.}, journal = {J Am Med Inform Assoc}, volume = {17}, year = {2010}, month = {2010 Sep-Oct}, pages = {507-13}, abstract = {

We aim to build and evaluate an open-source natural language processing system for information extraction from electronic medical record clinical free-text. We describe and evaluate our system, the clinical Text Analysis and Knowledge Extraction System (cTAKES), released open-source at http://www.ohnlp.org. The cTAKES builds on existing open-source technologies: the Unstructured Information Management Architecture framework and the OpenNLP natural language processing toolkit. Its components, specifically trained for the clinical domain, create rich linguistic and semantic annotations. Performance of individual components: sentence boundary detector accuracy=0.949; tokenizer accuracy=0.949; part-of-speech tagger accuracy=0.936; shallow parser F-score=0.924; named entity recognizer and system-level evaluation F-score=0.715 for exact and 0.824 for overlapping spans, and accuracy for concept mapping, negation, and status attributes for exact and overlapping spans of 0.957, 0.943, 0.859, and 0.580, 0.939, and 0.839, respectively. Overall performance is discussed against five applications. The cTAKES annotations are the foundation for methods and modules for higher-level semantic processing of clinical free-text.

}, keywords = {Biomedical Research, electronic health records, Information Storage and Retrieval, Natural Language Processing}, issn = {1527-974X}, doi = {10.1136/jamia.2009.001560}, author = {Savova, Guergana K and Masanz, James J and Ogren, Philip V and Zheng, Jiaping and Sohn, Sunghwan and Kipper-Schuler, Karin C and Chute, Christopher G} } @article {1043, title = {An open-source software tool for the generation of relaxation time maps in magnetic resonance imaging.}, journal = {BMC Med Imaging}, volume = {10}, year = {2010}, month = {2010}, pages = {16}, abstract = {

BACKGROUND: In magnetic resonance (MR) imaging, T1, T2 and T2* relaxation times represent characteristic tissue properties that can be quantified with the help of specific imaging strategies. While there are basic software tools for specific pulse sequences, until now there has been no universal software program available to automate pixel-wise mapping of relaxation times from various types of images or MR systems. Such a software program would allow researchers to test and compare new imaging strategies and thus would significantly facilitate research in the area of quantitative tissue characterization.

RESULTS: After defining requirements for a universal MR mapping tool, a software program named MRmap was created using a high-level graphics language. Additional features include a manual registration tool for source images with motion artifacts and a tabular DICOM viewer to examine pulse sequence parameters. MRmap was successfully tested on three different computer platforms with image data from three different MR system manufacturers and five different sorts of pulse sequences: multi-image inversion recovery T1; Look-Locker/TOMROP T1; modified Look-Locker (MOLLI) T1; single-echo T2/T2*; and multi-echo T2/T2*. Computing times varied between 2 and 113 seconds. Estimates of relaxation times compared favorably to those obtained from non-automated curve fitting. Completed maps were exported in DICOM format and could be read in standard software packages used for analysis of clinical and research MR data.

CONCLUSIONS: MRmap is a flexible cross-platform research tool that enables accurate mapping of relaxation times from various pulse sequences. The software allows researchers to optimize quantitative MR strategies in a manufacturer-independent fashion. The program and its source code were made available as open-source software on the internet.

}, keywords = {Algorithms, Humans, Image Enhancement, Image Interpretation, Computer-Assisted, Magnetic Resonance Imaging, Programming Languages, Reproducibility of Results, Sensitivity and Specificity, Software}, issn = {1471-2342}, doi = {10.1186/1471-2342-10-16}, url = {http://www.biomedcentral.com/1471-2342/10/16}, author = {Messroghli, Daniel R and Rudolph, Andre and Abdel-Aty, Hassan and Wassmuth, Ralf and Kuhne, Titus and Dietz, Rainer and Schulz-Menger, Jeanette} } @proceedings {508, title = {The Role of Workarounds during an Open Source Electronic Medical Record System Implementation}, year = {2010}, abstract = {A significant degree of customization of medical information technology is required to effectively integrate the promise of IT with the diversity and complexity of medical work. In the absence of such customizations, dissatisfaction and resistance toward the system arise. Indeed, the complexity of medical work and the inability of software to be tailored to diverse medical practices may explain the limited diffusion of health information systems, especially in North America. 
We study the role of workarounds during an open-source Electronic Medical Record System (EMR) implementation at a medium-size urgent care clinic in a major Canadian city. We found that the technology appropriation process involved the evolution of a number of non-trivial workarounds in order to match the EMR to medical work. The emergence of workarounds is conceptualized as a knowledge creation and integration process. This perspective allows us to look at the antecedents and the change dynamics of workarounds in the clinic. Furthermore, diverging from the negative view toward workarounds, we discuss the importance of incorporating workarounds during and following system development. The workaround perspective sheds light on how users{\textquoteright} behavior can be channeled into a constructive development effort. This paper contributes by examining the workarounds of medical practitioners using an open-source electronic medical record system as well as offering a knowledge perspective for the study of EMR appropriation.}, url = {http://aisel.aisnet.org/icis2010_submissions/47}, author = {Safadi, Hani and Faraj, Samer} } @conference {1099, title = {The Role of workarounds during an OpenSource Electronic Medical Record System Implementation.}, booktitle = {ICIS}, year = {2010}, author = {Safadi, Hani and Faraj, Samer} } @inbook {springerlink:10.1007/978-3-642-12197-5_85, title = {rtMEG: A Real-Time Software Toolbox for Brain-Machine Interfaces Using Magnetoencephalography}, booktitle = {17th International Conference on Biomagnetism Advances in Biomagnetism {\textendash} Biomag2010}, series = {IFMBE Proceedings}, volume = {28}, year = {2010}, note = {10.1007/978-3-642-12197-5_85}, pages = {362-365}, publisher = {Springer Berlin Heidelberg}, organization = {Springer Berlin Heidelberg}, isbn = {978-3-642-12197-5}, url = {http://dx.doi.org/10.1007/978-3-642-12197-5_85}, author = {Sudre, Gustavo and Wang, Wei and Song, Tao and Kajola, Matti and Vinjamuri, Ramana and Collinger, Jennifer and Degenhart, Alan and Bagic, Anto and Weber, Doug J.}, editor = {Magjarevic, R. and Nagel, J. H. and Supek, Selma and Su{\v s}ac, Ana} } @article {20482787, title = {Screensaver: an open source lab information management system (LIMS) for high throughput screening facilities.}, journal = {BMC bioinformatics}, volume = {11}, year = {2010}, month = {2010}, pages = {260}, abstract = {BACKGROUND: Shared-usage high throughput screening (HTS) facilities are becoming more common in academe as large-scale small molecule and genome-scale RNAi screening strategies are adopted for basic research purposes. These shared facilities require a unique informatics infrastructure that must not only provide access to and analysis of screening data, but must also manage the administrative and technical challenges associated with conducting numerous, interleaved screening efforts run by multiple independent research groups. RESULTS: We have developed Screensaver, a free, open source, web-based lab information management system (LIMS), to address the informatics needs of our small molecule and RNAi screening facility. Screensaver supports the storage and comparison of screening data sets, as well as the management of information about screens, screeners, libraries, and laboratory work requests. To our knowledge, Screensaver is one of the first applications to support the storage and analysis of data from both genome-scale RNAi screening projects and small molecule screening projects. 
CONCLUSIONS: The informatics and administrative needs of an HTS facility may be best managed by a single, integrated, web-accessible application such as Screensaver. Screensaver has proven useful in meeting the requirements of the ICCB-Longwood/NSRB Screening Facility at Harvard Medical School, and has provided similar benefits to other HTS facilities.}, author = {Tolopko, Andrew N and Sullivan, John P and Erickson, Sean D and Wrobel, David and Chiang, Su L and Rudnicki, Katrina and Rudnicki, Stewart and Nale, Jennifer and Selfors, Laura M and Greenhouse, Dara and Muhlich, Jeremy L and Shamu, Caroline E} } @article {20001786, title = {Using ImageJ for the quantitative analysis of flow-based adhesion assays in real-time under physiologic flow conditions.}, journal = {Platelets}, volume = {21}, year = {2010}, month = {2010 Feb}, pages = {60-6}, abstract = {This article intends to close the gap between the abundance of regular articles focusing on adhesive mechanisms of cells in a flow field and purely technical reports confined to the description of newly developed algorithms, not yet ready to be used by users without programming skills. A simple and robust method is presented for analysing raw videomicroscopic data of flow-based adhesion assays using the freely available public domain software ImageJ. We describe in detail the image processing routines used to rapidly and reliably evaluate the number of adherent and translocating platelets in videomicroscopic recordings. The depicted procedures were exemplified by analysing platelet interaction with immobilized von Willebrand factor and fibrinogen in flowing blood under physiological wall shear rates. Neutralizing GPIbalpha function reduced shear-dependent platelet translocation on von Willebrand factor and abolished firm platelet adhesion. Abciximab, Tirofiban and Eptifibatide completely inhibited GPIIb/IIIa-dependent stable platelet deposition on fibrinogen. The presented method to analyse videomicroscopic recordings from flow-based adhesion assays offers the advantage of providing a simple and reliable way to quantify flow-based adhesion assays, which is completely based on ImageJ and can easily be applied to study adhesion mechanisms of cells in non-fluorescent modes without the need to deviate from the presented protocol.}, author = {Meyer dos Santos, Sascha and Klinkhardt, Ute and Schneppenheim, Reinhard and Harder, Sebastian} } @article {499, title = {Bayesian analysis of neuroimaging data in FSL.}, journal = {NeuroImage}, volume = {45}, year = {2009}, month = {2009 Mar}, pages = {S173-86}, abstract = {Typically in neuroimaging we are looking to extract some pertinent information from imperfect, noisy images of the brain. This might be the inference of percent changes in blood flow in perfusion FMRI data, segmentation of subcortical structures from structural MRI, or inference of the probability of an anatomical connection between an area of cortex and a subthalamic nucleus using diffusion MRI. In this article we will describe how Bayesian techniques have made a significant impact in tackling problems such as these, particularly in regards to the analysis tools in the FMRIB Software Library (FSL). We shall see how Bayes provides a framework within which we can attempt to infer on models of neuroimaging data, while allowing us to incorporate our prior belief about the brain and the neuroimaging equipment in the form of biophysically informed or regularising priors. 
It allows us to extract probabilistic information from the data, and to probabilistically combine information from multiple modalities. Bayes can also be used to not only compare and select between models of different complexity, but also to infer on data using committees of models. Finally, we mention some analysis scenarios where Bayesian methods are impractical, and briefly discuss some practical approaches that we have taken in these cases.}, keywords = {Bayes Theorem, Brain, Diffusion Magnetic Resonance Imaging, Humans, Image Interpretation, Computer-Assisted, Software}, issn = {1095-9572}, author = {Woolrich, Mark W and Jbabdi, Saad and Patenaude, Brian and Chappell, Michael and Makni, Salima and Behrens, Timothy and Beckmann, Christian and Jenkinson, Mark and Smith, Stephen M} } @article {873, title = {The caBIG terminology review process.}, journal = {J Biomed Inform}, volume = {42}, year = {2009}, month = {2009 Jun}, pages = {571-80}, abstract = {

The National Cancer Institute (NCI) is developing an integrated biomedical informatics infrastructure, the cancer Biomedical Informatics Grid (caBIG), to support collaboration within the cancer research community. A key part of the caBIG architecture is the establishment of terminology standards for representing data. In order to evaluate the suitability of existing controlled terminologies, the caBIG Vocabulary and Data Elements Workspace (VCDE WS) working group has developed a set of criteria that serve to assess a terminology{\textquoteright}s structure, content, documentation, and editorial process. This paper describes the evolution of these criteria and the results of their use in evaluating four standard terminologies: the Gene Ontology (GO), the NCI Thesaurus (NCIt), the Common Terminology for Adverse Events (known as CTCAE), and the laboratory portion of the Logical Objects, Identifiers, Names and Codes (LOINC). The resulting caBIG criteria are presented as a matrix that may be applicable to any terminology standardization effort.

}, keywords = {Medical Informatics, National Institutes of Health (U.S.), Terminology as Topic, United States}, issn = {1532-0480}, doi = {10.1016/j.jbi.2008.12.003}, author = {Cimino, James J and Hayamizu, Terry F and Bodenreider, Olivier and Davis, Brian and Stafford, Grace A and Ringwald, Martin} } @article {19056288, title = {Dimensional error of selective laser sintering, three-dimensional printing and PolyJet models in the reproduction of mandibular anatomy.}, journal = {Journal of cranio-maxillo-facial surgery : official publication of the European Association for Cranio-Maxillo-Facial Surgery}, volume = {37}, year = {2009}, month = {2009 Apr}, pages = {167-73}, abstract = {BACKGROUND: Selective laser sintering (SLS), three-dimensional printing (3DP) and PolyJet are rapid prototyping (RP) techniques to fabricate prototypes from virtual biomedical images. To be used in maxillofacial surgery, these models must accurately reproduce the craniofacial skeleton. PURPOSE: To analyze the capacity of SLS, 3DP and PolyJet models to reproduce mandibular anatomy and their dimensional error. MATERIAL: Dry mandible, helical CT images, SLS, 3DP and PolyJet prototypes, and digital electronic caliper. METHODS: Helical CT images were acquired from a dry mandible (criterion standard) and manipulated with the InVesalius software. Prototypes were produced using SLS, 3DP and PolyJet techniques. Thirteen linear measurements of each prototype were made and compared with the dry mandible measurements. RESULTS: The results showed a dimensional error of 1.79\%, 3.14\% and 2.14\% for SLS, 3DP and PolyJet models, respectively. The models satisfactorily reproduced anatomic details and the SLS and PolyJet prototypes showed greater dimensional precision and reproduced mandibular anatomy more accurately than the 3DP model. CONCLUSIONS: The SLS prototype had a greater dimensional accuracy than the PolyJet and 3DP models. The PolyJet technique reproduced anatomic details of the mandible more accurately.}, author = {Ibrahim, Danilo and Broilo, Tiago Leonardo and Heitz, Claiton and Gerhardt de Oliveira, Mar{\'\i}lia and de Oliveira, Helena Willhelm and Nobre, Stella Maris Wanderlei and Dos Santos Filho, Jos{\'e} Henrique Gomes and Silva, Daniela Nascimento} } @article {19457798, title = {Free and open source software for the manipulation of digital images.}, journal = {AJR. American journal of roentgenology}, volume = {192}, year = {2009}, month = {2009 Jun}, pages = {W330-4}, abstract = {OBJECTIVE: Free and open source software is a type of software that is nearly as powerful as commercial software but is freely downloadable. This software can do almost everything that the expensive programs can. GIMP (gnu image manipulation program) is the free program that is comparable to Photoshop, and versions are available for Windows, Macintosh, and Linux platforms. This article briefly describes how GIMP can be installed and used to manipulate radiology images. 
CONCLUSION: It is no longer necessary to budget large amounts of money for high-quality software to achieve the goals of image processing and document creation because free and open source software is available for the user to download at will.}, author = {Solomon, Robert W} } @inbook {springerlink:10.1007/978-3-642-01932-6_45, title = {GIMIAS: An Open Source Framework for Efficient Development of Research Tools and Clinical Prototypes}, booktitle = {Functional Imaging and Modeling of the Heart}, series = {Lecture Notes in Computer Science}, volume = {5528}, year = {2009}, note = {10.1007/978-3-642-01932-6_45}, pages = {417-426}, publisher = {Springer Berlin / Heidelberg}, organization = {Springer Berlin / Heidelberg}, abstract = {GIMIAS is a workflow-oriented environment for addressing advanced biomedical image computing and building personalized computational models, which is extensible through the development of application-specific plug-ins. In addition, GIMIAS provides an open source framework for efficient development of research and clinical software prototypes integrating contributions from the Virtual Physiological Human community while allowing business-friendly technology transfer and commercial product development. This framework has been fully developed in ANSI-C++ on top of well-known open source libraries such as VTK, ITK and wxWidgets, among others. Based on GIMIAS, this paper presents a workflow for medical image analysis and simulation of the heart.}, url = {http://dx.doi.org/10.1007/978-3-642-01932-6_45}, author = {Larrabide, Ignacio and Omedas, Pedro and Martelli, Yves and Planes, Xavier and Nieber, Maarten and Moya, Juan and Butakoff, Constantine and Sebasti{\'a}n, Rafael and Camara, Oscar and De Craene, Mathieu and Bijnens, Bart and Frangi, Alejandro}, editor = {Ayache, Nicholas and Delingette, Herv{\'e} and Sermesant, Maxime} } @article {20044607, title = {Image analysis of breast cancer immunohistochemistry-stained sections using ImageJ: an RGB-based model.}, journal = {Anticancer research}, volume = {29}, year = {2009}, month = {2009 Dec}, pages = {4995-8}, abstract = {BACKGROUND: Image analysis of tissue sections using RGB image profiling is a modern, accepted technique. MATERIALS AND METHODS: A new method of RGB analysis, using the freeware ImageJ, is presented which can be applied to sections with either nuclear or cytoplasmic staining. The step-by-step process is presented and the method is tested using breast cancer specimens immunostained for CK-19 and estrogen receptors. RESULTS: This image analysis easily discriminates CK-19 and estrogen receptor positivity in prepared breast cancer specimens. The method is easy to perform, without the need for previous image transformations. CONCLUSION: Compared to previous methods, this method proved more accurate in estimating the actual colours that an observer recognizes as positive after immunostaining. 
Further studies are needed to evaluate whether this method is efficient enough to be applied in clinical practice.}, author = {Vrekoussis, T and Chaniotis, V and Navrozoglou, I and Dousias, V and Pavlakis, K and Stathopoulos, E N and Zoras, O} } @conference {1085, title = {MediGrid {\textendash} Facilitating Semantic-Based processing of Biomedical Data and Knowledge}, booktitle = {Proceedings of the 1st International Workshop on Open Source in European Health Care: The Time is Ripe (BIOSTEC 2009)}, year = {2009}, isbn = {978-989-8111-79-1}, doi = {10.5220/0001828500180021}, author = {Jan Vejvalka and Petr Lesn{\'y} and Tom{\'a}{\v s} Hole{\v c}ek and Kry{\v s}tof Slab{\'y} and Ad{\'e}la Jarol{\'\i}mkov{\'a} and Helena Bouzkov{\'a}} } @article {1017, title = {Millennium Global Village-Net: bringing together Millennium Villages throughout sub-Saharan Africa.}, journal = {Int J Med Inform}, volume = {78}, year = {2009}, month = {2009 Dec}, pages = {802-7}, abstract = {

The Millennium Villages Project (MVP), based at The Earth Institute at Columbia University, is a bottom-up, community-led approach to show how villages in developing countries can get out of the poverty trap that afflicts more than a billion people worldwide. Well-targeted, practical inputs can help the community invest in a path leading to self-sustaining development. There are 80 Millennium Villages clustered in 10 countries throughout sub-Saharan Africa. MVP is an important development process for empowering communities to invest in a package of integrated interventions aiming to increase food production and improve access to safe water, health care, education and infrastructure. The process benefits from synergies of the integrated approach and relies on community leadership as empowered by proven technological inputs. MVP is committed to a science-based approach to assess and monitor the progress of the communities towards clear objectives, the Millennium Development Goals (MDGs), and to do so with mechanisms that are scalable and sustainable. This approach offers much more than simply collecting and analyzing data since the mechanism used for recording progress would provide a bridge over the divide which separates the haves and the have-nots (by facilitating the sharing of solutions from one community to another bidirectionally). By so doing, it allows people to enhance their own futures in a sustainable manner. Solutions found in one community are transferable to similar communities in other MVP villages. To achieve this goal, the MVP requires an information and communication system which can provide both necessary infrastructure for monitoring and evaluation, and tools for communicating among the villages, cities and countries. This system is called the Millennium Global Village-Net (MGV-Net). It takes advantage of the latest in open source software (OpenMRS), databases (MySQL), interface terminology and a centralized concept dictionary, and uses appropriate technology locally for data entry.

}, keywords = {Africa South of the Sahara, Community Health Services, Developing Countries, Healthy People Programs, Humans, Poverty}, issn = {1872-8243}, doi = {10.1016/j.ijmedinf.2009.08.002}, author = {Kanter, Andrew S and Negin, Joel and Olayo, Bernard and Bukachi, Frederick and Johnson, Edward and Sachs, Sonia Ehrlich} } @article {1018, title = {Mobile Care (Moca) for Remote Diagnosis and Screening.}, journal = {J Health Inform Dev Ctries}, volume = {3}, year = {2009}, month = {2009 Jan 1}, pages = {17-21}, abstract = {

Moca is a cell phone-facilitated clinical information system to improve diagnostic, screening and therapeutic capabilities in remote resource-poor settings. The software allows transmission of any medical file, whether a photo, x-ray, audio or video file, through a cell phone to (1) a central server for archiving and incorporation into an electronic medical record (to facilitate longitudinal care, quality control, and data mining), and (2) a remote specialist for real-time decision support (to leverage expertise). The open source software is designed as an end-to-end clinical information system that seamlessly connects health care workers to medical professionals. It is integrated with OpenMRS, an existing open source medical records system commonly used in developing countries.

}, issn = {1178-4407}, author = {Celi, Leo Anthony and Sarmenta, Luis and Rotberg, Jhonathan and Marcelo, Alvin and Clifford, Gari} } @article {20351824, title = {MyChildren{\textquoteright}s: integration of a personally controlled health record with a tethered patient portal for a pediatric and adolescent population.}, journal = {AMIA ... Annual Symposium proceedings / AMIA Symposium. AMIA Symposium}, volume = {2009}, year = {2009}, month = {2009}, pages = {65-9}, abstract = {Personally controlled health records (PCHRs) and patient portals are increasingly being offered by healthcare institutions, employers, insurance companies and commercial entities to allow patients access to their health information. Both applications offer unique services to provide patients with tools to manage their health. While PCHRs allow users ubiquitous, portable, patient-controlled access to their health information, traditional patient portals provide provider-tethered applications allowing patients access to, but not control of, certain healthcare information, as well as communication and administrative functions, such as secure messaging, appointment management and prescription refill requests, facilitating care at a specific healthcare facility. We describe our approach for the design, content creation, policy development, and implementation of MyChildren{\textquoteright}s, a unique web-based application leveraging the advantages of both a provider-tethered patient portal and a PCHR to allow patients and their guardians access to the functionality and convenience of a traditional patient portal, as well as the portability and flexibility of a PCHR.}, author = {Bourgeois, Fabienne C and Mandl, Kenneth D and Shaw, Danny and Flemming, Daisy and Nigrin, Daniel J} } @article {19157968, title = {The OpenMRS Implementers Network.}, journal = {International journal of medical informatics}, volume = {78}, year = {2009}, month = {2009 Nov}, pages = {711-20}, abstract = {OBJECTIVE: OpenMRS (www.openmrs.org) is a configurable open source electronic medical record application developed and maintained by a large network of open source developers coordinated by the Regenstrief Institute and Partners in Health and mainly used for HIV patient and treatment information management in Africa. Our objective is to develop an open Implementers Network for OpenMRS to provide regional support for the growing number of OpenMRS implementations in Africa and to include African developers and implementers in the future growth of OpenMRS. METHODS: We have developed the OpenMRS Implementers Network using a dedicated Wiki site and e-mail server. We have also organized annual meetings in South Africa and regional training courses at African locations where OpenMRS is being implemented. An OpenMRS Internship program has been initiated and we have started collaborating with similar networks and projects working in Africa. To evaluate its potential, OpenMRS was implemented initially at one site in South Africa by a single implementer using a downloadable OpenMRS application and only the OpenMRS Implementers Network for support. RESULTS: The OpenMRS Implementers Network Wiki and list server have grown into effective means of providing implementation support and forums for exchange of implementation experiences. 
The annual OpenMRS Implementers meeting has been held in South Africa for the past three years and is attracting successively larger numbers of participants, with almost 200 implementers and developers attending the 2008 meeting in Durban, South Africa. Six African developers are presently registered on the first intake of the OpenMRS Internship program. Successful collaborations have been started with several African developer groups, and projects have been initiated to develop interoperability between OpenMRS and various applications. The South African OpenMRS Implementer group successfully configured, installed and maintained an integrated HIV/TB OpenMRS application without significant programming support. Since then, this model has been replicated in several other African sites. The OpenMRS Implementers Network has contributed substantially to the growth and sustainability of OpenMRS in Africa and has become a useful way of including Africans in the development and implementation of OpenMRS in developing countries. The Network provides valuable support and enables a basic OpenMRS application to be implemented in the absence of onsite programmers.}, url = {http://www.healthware.org/openmrs/openmrs.pdf}, author = {Seebregts, Christopher J and Mamlin, Burke W and Biondich, Paul G and Fraser, Hamish S F and Wolfe, Benjamin A and Jazayeri, Darius and Allen, Christian and Miranda, Justin and Baker, Elaine and Musinguzi, Nicholas and Kayiwa, Daniel and Fourie, Carl and Lesh, Neal and Kanter, Andrew and Yiannoutsos, Constantin T and Bailey, Christopher} } @article {19534858, title = {OsiriX, a useful tool for processing tomographic images in patients with facial fracture}, journal = {Cirugia y cirujanos}, volume = {77}, year = {2009}, month = {2009 Mar-Apr}, pages = {95-9}, abstract = {BACKGROUND: OsiriX, a Mac OS X-based open source program, is presented as a useful tool to process tomographic images for diagnosis and preoperative planning in patients with facial fractures. METHODS: CT scans were performed on 124 patients with facial fractures treated at the Department of Maxillofacial and Reconstructive Surgery of the Hospital de Traumatolog{\'\i}a y Ortopedia "Lomas Verdes" in Mexico City. The information obtained was recorded in DICOM format on CDs and processed on a Macintosh laptop with OsiriX software, performing multiplanar and 3D reconstructions. Surgical findings were compared to the images obtained by the software. RESULTS: Of the surgical findings, 96.5\% matched the OsiriX images. Only 3.5\% of the OsiriX images were not consistent because of distortion or artifacts in the CT due to firearm projectiles and Erich arch bars near the involved area. CONCLUSIONS: Based on the results obtained, the authors consider that the OsiriX software is a useful tool for diagnosis and preoperative planning in patients with facial fractures. 
Furthermore, it prevents the loss of information due to the process of image selection by the radiology staff.}, author = {Sierra-Mart{\'\i}nez, Eduardo and Cienfuegos-Monroy, Ricardo and Fern{\'a}ndez-Sobrino, Gerardo} } @conference {903, title = {PESCA: Developing an open source platform to bring eHealth to Latin America and the Caribbean}, booktitle = {Proceedings of the 1st International Workshop on Open Source in European Health Care: The Time is Ripe, OSEHC 2009 In Conjunction with BIOSTEC 2009 and the EFMI LIFOSS WG}, year = {2009}, abstract = {Nowadays, society needs to communicate, and technologies are revolutionizing information systems, especially in health, where effective use of technology can serve the needs of individuals. These technologies can also contribute to the development of local economies. Open source software (OSS) can be a useful strategy to bring information and communication technologies to developing countries. However, especially in Latin America and the Caribbean, there are some barriers to adopting OSS for health: the need for open standards, heterogeneous OSS developed without normalization and metrics, the predominance of English as the top OSS language, a lack of initiatives to evaluate existing health OSS, and needs for quality control and functional validation. The Open Source Platform for eHealth (PESCA) has been designed as a set of interoperable modules that can solve either simple problems of health management and communication in primary care or complex problems in healthcare systems, including telehealth communications between heterogeneous institutions.}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-67650527704\&partnerID=40\&md5=cb5a91f3ccf525ed19857b1e3354a61d}, author = {Orcero, D.S.}, editor = {L{\'o}pez, D.L.} } @article {19184561, title = {PyMVPA: A python toolbox for multivariate pattern analysis of fMRI data.}, journal = {Neuroinformatics}, volume = {7}, year = {2009}, month = {2009 Spring}, pages = {37-53}, abstract = {Decoding patterns of neural activity onto cognitive states is one of the central goals of functional brain imaging. Standard univariate fMRI analysis methods, which correlate cognitive and perceptual function with the blood oxygenation-level dependent (BOLD) signal, have proven successful in identifying anatomical regions based on signal increases during cognitive and perceptual tasks. Recently, researchers have begun to explore new multivariate techniques that have proven to be more flexible, more reliable, and more sensitive than standard univariate analysis. Drawing on the field of statistical learning theory, these new classifier-based analysis techniques possess explanatory power that could provide new insights into the functional properties of the brain. However, unlike the wealth of software packages for univariate analyses, there are few packages that facilitate multivariate pattern classification analyses of fMRI data. Here we introduce a Python-based, cross-platform, and open-source software toolbox, called PyMVPA, for the application of classifier-based analysis techniques to fMRI datasets. PyMVPA makes use of Python{\textquoteright}s ability to access libraries written in a large variety of programming languages and computing environments to interface with the wealth of existing machine learning packages. 
We present the framework in this paper and provide illustrative examples on its usage, features, and programmability.}, author = {Hanke, Michael and Halchenko, Yaroslav O and Sederberg, Per B and Hanson, Stephen Jos{\'e} and Haxby, James V and Pollmann, Stefan} } @article {19212459, title = {PyMVPA: A Unifying Approach to the Analysis of Neuroscientific Data.}, journal = {Frontiers in neuroinformatics}, volume = {3}, year = {2009}, month = {2009}, pages = {3}, abstract = {The Python programming language is steadily increasing in popularity as the language of choice for scientific computing. The ability of this scripting environment to access a huge code base in various languages, combined with its syntactical simplicity, make it the ideal tool for implementing and sharing ideas among scientists from numerous fields and with heterogeneous methodological backgrounds. The recent rise of reciprocal interest between the machine learning (ML) and neuroscience communities is an example of the desire for an inter-disciplinary transfer of computational methods that can benefit from a Python-based framework. For many years, a large fraction of both research communities have addressed, almost independently, very high-dimensional problems with almost completely non-overlapping methods. However, a number of recently published studies that applied ML methods to neuroscience research questions attracted a lot of attention from researchers from both fields, as well as the general public, and showed that this approach can provide novel and fruitful insights into the functioning of the brain. In this article we show how PyMVPA, a specialized Python framework for machine learning based data analysis, can help to facilitate this inter-disciplinary technology transfer by providing a single interface to a wide array of machine learning libraries and neural data-processing methods. We demonstrate the general applicability and power of PyMVPA via analyses of a number of neural data modalities, including fMRI, EEG, MEG, and extracellular recordings.}, author = {Hanke, Michael and Halchenko, Yaroslav O and Sederberg, Per B and Olivetti, Emanuele and Fr{\"u}nd, Ingo and Rieger, Jochem W and Herrmann, Christoph S and Haxby, James V and Hanson, Stephen Jos{\'e} and Pollmann, Stefan} } @article {19806300, title = {Recent advances in visualization, imaging, and navigation in hepatobiliary and pancreatic sciences.}, journal = {Journal of hepato-biliary-pancreatic surgery}, year = {2009}, month = {2009 Oct 6}, abstract = {BACKGROUND/PURPOSE: Recent introduction of multi-detector CT (MDCT) and high-speed magnetic resonance (MR) imaging have dramatically advanced visualization and imaging technology in diagnostic and therapeutic strategy in hepatobiliary pancreatic disease. However, image diagnostics have progressed with a background of the essence of anatomy, pathology, and physiology. It is important to object the reflection of the patient{\textquoteright}s condition and pathology of each disease and remove pattern recognition in what they were depicted as an image. Visualization plays another important role in various medical diagnostics. Trends in scientific visualization will depend on advancements in molecular technology and computer hardware as well as trends in engineering disciplines. 
METHODS: In this special issue, the recent advances in visualization and imaging in the field of hepatobiliary and pancreatic sciences are featured, including the application of advanced visualization techniques, data management, data compression, and feature extraction. RESULTS: We discuss the potential benefits of new technologies and procedures in hepatobiliary and pancreatic areas, namely circulating tumor cells, MR imaging for hepatocellular carcinoma, indocyanine green fluorescence under infrared light observation, carbon dioxide enhanced MDCT virtual cholangiopancreatography, endoscopic ultrasonography-guided biliary drainage, natural orifice translumenal endoscopic surgery, MR-laparoscopy, and image overlay navigation surgery by OsiriX. CONCLUSION: Some of the recent trends are discussed in terms of visualization and imaging in hepatobiliary and pancreatic sciences. The goal in using visualization is to assist existing scientific procedures by providing new insight through visual representation.}, author = {Sugimoto, Maki} } @conference {1089, title = {Repositories of Reusable Auxological (Growth) Algorithms for eHealth}, booktitle = {Proceedings of the 1st International Workshop on Open Source in European Health Care: The Time is Ripe (BIOSTEC 2009)}, year = {2009}, isbn = {978-989-8111-79-1}, doi = {10.5220/0001827900540058}, author = {Petr Lesn{\'y} and Hana Kr{\'a}sni{\v c}anov{\'a} and Tom{\'a}{\v s} Hole{\v c}ek and Kry{\v s}tof Slab{\'y} and Jan Vejvalka} } @article {19425593, title = {TOPPView: an open-source viewer for mass spectrometry data.}, journal = {Journal of proteome research}, volume = {8}, year = {2009}, month = {2009 Jul}, pages = {3760-3}, abstract = {Visualization of complex mass spectrometric data sets is becoming increasingly important in proteomics and metabolomics. We present TOPPView, an integrated data visualization and analysis tool for mass spectrometric data sets. TOPPView allows the visualization and comparison of individual mass spectra, two-dimensional LC-MS data sets and their accompanying metadata. By supporting standardized XML-based data exchange formats, data import is possible from any type of mass spectrometer. The integrated analysis tools of the OpenMS Proteomics Pipeline (TOPP) allow efficient data analysis from within TOPPView through a convenient graphical user interface. TOPPView runs on all major operating systems and is available free of charge under an open-source license at http://www.openms.de.}, author = {Sturm, Marc and Kohlbacher, Oliver} } @conference {1090, title = {Writing Open Source SunXACML Access Control in Electronic Health Record with Acceptable Performances}, booktitle = {Proceedings of the 1st International Workshop on Open Source in European Health Care: The Time is Ripe (BIOSTEC 2009)}, year = {2009}, isbn = {978-989-8111-79-1}, doi = {10.5220/0001813400590068}, author = {Snezana Sucurovic and Dejan Simic} } @article {920, title = {The AQUA-FONTIS study: protocol of a multidisciplinary, cross-sectional and prospective longitudinal study for developing standardized diagnostics and classification of non-thyroidal illness syndrome.}, journal = {BMC Endocr Disord}, volume = {8}, year = {2008}, month = {2008}, pages = {13}, abstract = {

BACKGROUND: Non-thyroidal illness syndrome (NTIS) is a characteristic functional constellation of thyrotropic feedback control that frequently occurs in critically ill patients. Although this condition is associated with significantly increased morbidity and mortality, there is still controversy on whether NTIS is caused by artefacts, is a form of beneficial adaptation, or is a disorder requiring treatment. Trials investigating substitution therapy of NTIS revealed contradictory results. The comparison of heterogeneous patient cohorts may be the cause for those inconsistencies.

OBJECTIVES: The primary objective of this study is the identification and differentiation of different functional states of thyrotropic feedback control in order to define relevant evaluation criteria for the prognosis of affected patients. Furthermore, we intend to assess the significance of an innovative physiological index approach (SPINA) in the differential diagnosis between NTIS and latent (so-called "sub-clinical") thyrotoxicosis. The secondary objective is the observation of variables that quantify distinct components of NTIS in the context of independent predictors of evolution, survival or pathophysiological condition and of influencing or disturbing factors like medication.

DESIGN: The approach to a quantitative follow-up of non-thyroidal illness syndrome (AQUA FONTIS study) is designed as both a cross-sectional and prospective longitudinal observation trial in critically ill patients. Patients are observed at a minimum of two evaluation points, with consecutive assessments of thyroid status and physiological and clinical data in additional weekly observations up to discharge. A second part of the study investigates the neuropsychological impact of NTIS and medium-term outcomes. The study design incorporates a two-module structure that covers a reduced protocol in the form of an observation trial before patients give informed consent. Additional investigations are performed if and after patients agree to participate.

TRIAL REGISTRATION: ClinicalTrials.gov NCT00591032.

}, issn = {1472-6823}, doi = {10.1186/1472-6823-8-13}, author = {Dietrich, Johannes W and Stachon, Axel and Antic, Biljana and Klein, Harald H and Hering, Steffen} } @article {18999296, title = {caBIG compatibility review system: software to support the evaluation of applications using defined interoperability criteria.}, journal = {AMIA ... Annual Symposium proceedings / AMIA Symposium. AMIA Symposium}, year = {2008}, month = {2008}, pages = {197-201}, abstract = {The caBIG Compatibility Review System (CRS) is a web-based application to support compatibility reviews, which certify that software applications that pass the review meet a specific set of criteria that allow them to interoperate. The CRS contains workflows that support both semantic and syntactic reviews, which are performed by the caBIG Vocabularies and Common Data Elements (VCDE) and Architecture workspaces, respectively. The CRS increases the efficiency of compatibility reviews by reducing administrative overhead and it improves uniformity by ensuring that each review is conducted according to a standard process. The CRS provides metrics that allow the review team to evaluate the level of data element reuse in an application, a first step towards quantifying the extent of harmonization between applications. Finally, functionality is being added that will provide automated validation of checklist criteria, which will further simplify the review process.}, author = {Freimuth, Robert R and Schauer, Michael W and Lodha, Preeti and Govindrao, Poornima and Nagarajan, Rakesh and Chute, Christopher G} } @article {18096909, title = {caGrid 1.0: an enterprise Grid infrastructure for biomedical research.}, journal = {Journal of the American Medical Informatics Association : JAMIA}, volume = {15}, year = {2008}, month = {2008 Mar-Apr}, pages = {138-49}, abstract = {OBJECTIVE: To develop software infrastructure that will provide support for discovery, characterization, integrated access, and management of diverse and disparate collections of information sources, analysis methods, and applications in biomedical research. DESIGN: An enterprise Grid software infrastructure, called caGrid version 1.0 (caGrid 1.0), has been developed as the core Grid architecture of the NCI-sponsored cancer Biomedical Informatics Grid (caBIG) program. It is designed to support a wide range of use cases in basic, translational, and clinical research, including 1) discovery, 2) integrated and large-scale data analysis, and 3) coordinated study. MEASUREMENTS: The caGrid is built as a Grid software infrastructure and leverages Grid computing technologies and the Web Services Resource Framework standards. It provides a set of core services, toolkits for the development and deployment of new community provided services, and application programming interfaces for building client applications. RESULTS: The caGrid 1.0 was released to the caBIG community in December 2006. It is built on open source components and caGrid source code is publicly and freely available under a liberal open source license. The core software, associated tools, and documentation can be downloaded from the following URL: https://cabig.nci.nih.gov/workspaces/Architecture/caGrid. CONCLUSIONS: While caGrid 1.0 is designed to address use cases in cancer research, the requirements associated with discovery, analysis and integration of large scale data, and coordinated studies are common in other biomedical fields. 
In this respect, caGrid 1.0 is the realization of a framework that can benefit the entire biomedical community.}, author = {Oster, Scott and Langella, Stephen and Hastings, Shannon and Ervin, David and Madduri, Ravi and Phillips, Joshua and Kurc, Tahsin and Siebenlist, Frank and Covitz, Peter and Shanbhag, Krishnakant and Foster, Ian and Saltz, Joel} } @article {18366760, title = {OpenMS - an open-source software framework for mass spectrometry.}, journal = {BMC bioinformatics}, volume = {9}, year = {2008}, month = {2008}, pages = {163}, abstract = {BACKGROUND: Mass spectrometry is an essential analytical technique for high-throughput analysis in proteomics and metabolomics. The development of new separation techniques, precise mass analyzers and experimental protocols is a very active field of research. This leads to more complex experimental setups yielding ever increasing amounts of data. Consequently, analysis of the data is currently often the bottleneck for experimental studies. Although software tools for many data analysis tasks are available today, they are often hard to combine with each other or not flexible enough to allow for rapid prototyping of a new analysis workflow. RESULTS: We present OpenMS, a software framework for rapid application development in mass spectrometry. OpenMS has been designed to be portable, easy-to-use and robust while offering a rich functionality ranging from basic data structures to sophisticated algorithms for data analysis. This has already been demonstrated in several studies. CONCLUSION: OpenMS is available under the Lesser GNU Public License (LGPL) from the project website at http://www.openms.de.}, url = {http://www.biomedcentral.com/1471-2105/9/163}, author = {Sturm, Marc and Bertsch, Andreas and Gr{\"o}pl, Clemens and Hildebrandt, Andreas and Hussong, Rene and Lange, Eva and Pfeifer, Nico and Schulz-Trieglaff, Ole and Zerck, Alexandra and Reinert, Knut and Kohlbacher, Oliver} } @article {18570637, title = {OSPACS: Ultrasound image management system.}, journal = {Source code for biology and medicine}, volume = {3}, year = {2008}, month = {2008}, pages = {11}, abstract = {ABSTRACT: BACKGROUND: Ultrasound scanning uses the medical imaging format, DICOM, for electronically storing the images and data associated with a particular scan. Large health care facilities typically use a picture archiving and communication system (PACS) for storing and retrieving such images. However, these systems are usually not suitable for managing large collections of anonymized ultrasound images gathered during a clinical screening trial. RESULTS: We have developed a system enabling the accurate archiving and management of ultrasound images gathered during a clinical screening trial. It is based upon a Windows application utilizing an open-source DICOM image viewer and a relational database. The system automates the bulk import of DICOM files from removable media by cross-validating the patient information against an external database, anonymizing the data as well as the image, and then storing the contents of the file as a field in a database record. These image records may then be retrieved from the database and presented in a tree-view control so that the user can select particular images for display in a DICOM viewer or export them to external media. CONCLUSION: This system provides error-free automation of ultrasound image archiving and management, suitable for use in a clinical trial. 
An open-source project has been established to promote continued development of the system.}, url = {http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2442597/}, author = {Stott, Will and Ryan, Andy and Jacobs, Ian J and Menon, Usha and Bessant, Conrad and Jones, Christopher} } @article {17850667, title = {Indivo: a personally controlled health record for health information exchange and communication.}, journal = {BMC medical informatics and decision making}, volume = {7}, year = {2007}, month = {2007}, pages = {25}, abstract = {BACKGROUND: Personally controlled health records (PCHRs), a subset of personal health records (PHRs), enable a patient to assemble, maintain and manage a secure copy of his or her medical data. Indivo (formerly PING) is an open source, open standards PCHR with an open application programming interface (API). RESULTS: We describe how the PCHR platform can provide standard building blocks for networked PHR applications. Indivo allows the ready integration of diverse sources of medical data under a patient{\textquoteright}s control through the use of standards-based communication protocols and APIs for connecting PCHRs to existing and future health information systems. CONCLUSION: The strict and transparent personal control model is designed to encourage widespread participation by patients, healthcare providers and institutions, thus creating the ecosystem for development of innovative, consumer-focused healthcare applications.}, url = {http://www.biomedcentral.com/1472-6947/7/25/}, author = {Mandl, Kenneth D and Simons, William W and Crawford, William C R and Abbett, Jonathan M} } @article {17249408, title = {OpenSourcePACS: an extensible infrastructure for medical image management.}, journal = {IEEE transactions on information technology in biomedicine : a publication of the IEEE Engineering in Medicine and Biology Society}, volume = {11}, year = {2007}, month = {2007 Jan}, pages = {94-109}, abstract = {The development of comprehensive picture archive and communication systems (PACS) has mainly been limited to proprietary developments by vendors, though a number of freely available software projects have addressed specific image management tasks. The openSourcePACS project aims to provide an open source, common foundation upon which not only can a basic PACS be readily implemented, but to also support the evolution of new PACS functionality through the development of novel imaging applications and services. openSourcePACS consists of four main software modules: 1) image order entry, which enables the ordering and tracking of structured image requisitions; 2) an agent-based image server framework that coordinates distributed image services including routing, image processing, and querying beyond the present digital image and communications in medicine (DICOM) capabilities; 3) an image viewer, supporting standard display and image manipulation tools, DICOM presentation states, and structured reporting; and 4) reporting and result dissemination, supplying web-based widgets for creating integrated reports. All components are implemented using Java to encourage cross-platform deployment. To demonstrate the usage of openSourcePACS, a preliminary application supporting primary care/specialist communication was developed and is described herein. 
Ultimately, the goal of openSourcePACS is to promote the wide-scale development and usage of PACS and imaging applications within academic and research communities.}, url = {http://escholarship.org/uc/item/186368fv}, author = {Bui, Alex A T and Morioka, Craig and Dionisio, John David N and Johnson, David B and Sinha, Usha and Ardekani, Siamak and Taira, Ricky K and Aberle, Denise R and El-Saden, Suzie and Kangarloo, Hooshang} } @article {18183912, title = {PyEPL: a cross-platform experiment-programming library.}, journal = {Behavior research methods}, volume = {39}, year = {2007}, month = {2007 Nov}, pages = {950-8}, abstract = {PyEPL (the Python Experiment-Programming Library) is a Python library which allows cross-platform and object-oriented coding of behavioral experiments. It provides functions for displaying text and images onscreen, as well as playing and recording sound, and is capable of rendering 3-D virtual environments for spatial-navigation tasks. It is currently tested for Mac OS X and Linux. It interfaces with Activewire USB cards (on Mac OS X) and the parallel port (on Linux) for synchronization of experimental events with physiological recordings. In this article, we first present two sample programs which illustrate core PyEPL features. The examples demonstrate visual stimulus presentation, keyboard input, and simulation and exploration of a simple 3-D environment. We then describe the components and strategies used in implementing PyEPL.}, author = {Geller, Aaron S and Schlefer, Ian K and Sederberg, Per B and Jacobs, Joshua and Kahana, Michael J} } @article {17700904, title = {Reflections on the role of open source in health information system interoperability.}, journal = {Yearbook of medical informatics}, year = {2007}, month = {2007}, pages = {50-60}, abstract = {OBJECTIVES: This paper reflects on the role of open source in health information system interoperability. Open source is a driving force in computer science research and the development of information systems. It facilitates the sharing of information and ideas, enables evolutionary development and open collaborative testing of code, and broadens the adoption of interoperability standards. In health care, information systems have been developed largely ad hoc following proprietary specifications and customized design. However, the wide deployment of integrated services such as Electronic Health Records (EHRs) over regional health information networks (RHINs) relies on interoperability of the underlying information systems and medical devices. METHODS: This reflection is built on the experiences of the PICNIC project that developed shared software infrastructure components in open source for RHINs and the OpenECG network that offers open source components to lower the implementation cost of interoperability standards, such as SCP-ECG in electrocardiography. RESULTS: Open source components implementing standards and a community providing feedback from real-world use are key enablers of health care information system interoperability.
CONCLUSIONS: Investing in open source is investing in interoperability and a vital aspect of a long term strategy towards comprehensive health services and clinical research.}, author = {Sfakianakis, S and Chronaki, C E and Chiarugi, F and Conforti, F and Katehakis, D G} } @conference {782, title = {Remote Large Data Visualization in the ParaView Framework.}, booktitle = {EGPGV}, year = {2006}, publisher = {Eurographics Association}, organization = {Eurographics Association}, keywords = {dblp}, isbn = {3-905673-40-1}, url = {http://dblp.uni-trier.de/db/conf/egpgv/egpgv2006.html$\#$CedilnikGMAF06}, author = {Cedilnik, Andy and Geveci, Berk and Moreland, Kenneth and Ahrens, James P. and Favre, Jean M.}, editor = {Heirich, Alan and Raffin, Bruno and dos Santos, Lu{\'\i}s Paulo Peixoto} } @article {1041, title = {Static telepathology in cancer institute of Tehran university: report of the first academic experience in Iran.}, journal = {Diagn Pathol}, volume = {1}, year = {2006}, month = {2006}, pages = {33}, abstract = {

Telepathology is the practice of pathology at a distance, which allows quick and timely access to an expert opinion. We analyzed our new experience at the Cancer Institute of Tehran University of Medical Sciences with the iPath telepathology server of Basel University. One hundred sixty-one cases were consulted over a period of 32 months. These cases had been received for a second evaluation, but a definite diagnosis could not be made at this centre. The number of images per case ranged from 3 to 32 (mean: 8). Except for one case, all cases were evaluated by consultants. A definite final diagnosis was achieved in 88/160 cases (54.7\%). Recommendations for further evaluation were offered in 42/160 cases (26\%). Major discrepancies were encountered in 30/160 cases (19\%). Thirty-nine of the cases (24.3\%) were reported within 1 day. The rate of achieving a final diagnosis was higher in the histological group than in the cytological one. An increase in the number of H\&E images had no significant effect on achieving a definite final diagnosis. The rate of achieving a final diagnosis in this study is much lower than in other similar studies, which could be due to inappropriate sampling of images, a potential cause of misdiagnosis in static telepathology. The other possible reason is that all of the cases in this study were problematic cases for which a definite diagnosis could not be made even in the primary consultation. The mean time to a final diagnosis was also longer than in other studies, which could be due to the reasons mentioned above.

}, issn = {1746-1596}, doi = {10.1186/1746-1596-1-33}, author = {Abdirad, Afshin and Sarrafpour, Babak and Ghaderi-Sohi, Siavash} } @article {16545965, title = {User-guided 3D active contour segmentation of anatomical structures: significantly improved efficiency and reliability.}, journal = {NeuroImage}, volume = {31}, year = {2006}, month = {2006 Jul 1}, pages = {1116-28}, abstract = {Active contour segmentation and its robust implementation using level set methods are well-established theoretical approaches that have been studied thoroughly in the image analysis literature. Despite the existence of these powerful segmentation methods, the needs of clinical research continue to be fulfilled, to a large extent, using slice-by-slice manual tracing. To bridge the gap between methodological advances and clinical routine, we developed an open source application called ITK-SNAP, which is intended to make level set segmentation easily accessible to a wide range of users, including those with little or no mathematical expertise. This paper describes the methods and software engineering philosophy behind this new tool and provides the results of validation experiments performed in the context of an ongoing child autism neuroimaging study. The validation establishes SNAP intrarater and interrater reliability and overlap error statistics for the caudate nucleus and finds that SNAP is a highly reliable and efficient alternative to manual tracing. Analogous results for lateral ventricle segmentation are provided.}, author = {Yushkevich, Paul A and Piven, Joseph and Hazlett, Heather Cody and Smith, Rachel Gimpel and Ho, Sean and Gee, James C and Gerig, Guido} } @article {16257947, title = {Information technology as a tool to improve the quality of American Indian health care.}, journal = {American journal of public health}, volume = {95}, year = {2005}, month = {2005 Dec}, pages = {2173-9}, abstract = {The American Indian/Alaska Native population experiences a disproportionate burden of disease across a spectrum of conditions. While the recent National Healthcare Disparities Report highlighted differences in quality of care among racial and ethnic groups, there was only very limited information available for American Indians. The Indian Health Service (IHS) is currently enhancing its information systems to improve the measurement of health care quality as well as to support quality improvement initiatives. We summarize current knowledge regarding health care quality for American Indians, highlighting the variation in reported measures in the existing literature. We then discuss how the IHS is using information systems to produce standardized performance measures and present future directions for improving American Indian health care quality.}, author = {Sequist, Thomas D and Cullen, Theresa and Ayanian, John Z} } @article {587, title = {The medical imaging interaction toolkit.}, journal = {Medical image analysis}, volume = {9}, year = {2005}, month = {2005 Dec}, pages = {594-604}, abstract = {Thoroughly designed, open-source toolkits emerge to boost progress in medical imaging. The Insight Toolkit (ITK) provides this for the algorithmic scope of medical imaging, especially for segmentation and registration. But medical imaging algorithms have to be clinically applied to be useful, which additionally requires visualization and interaction. The Visualization Toolkit (VTK) has powerful visualization capabilities, but only low-level support for interaction. 
In this paper, we present the Medical Imaging Interaction Toolkit (MITK). The goal of MITK is to significantly reduce the effort required to construct specifically tailored, interactive applications for medical image analysis. MITK allows an easy combination of algorithms developed by ITK with visualizations created by VTK and extends these two toolkits with those features, which are outside the scope of both. MITK adds support for complex interactions with multiple states as well as undo-capabilities, a very important prerequisite for convenient user interfaces. Furthermore, MITK facilitates the realization of multiple, different views of the same data (as a multiplanar reconstruction and a 3D rendering) and supports the visualization of 3D+t data, whereas VTK is only designed to create one kind of view of 2D or 3D data. MITK reuses virtually everything from ITK and VTK. Thus, it is not at all a competitor to ITK or VTK, but an extension, which eases the combination of both and adds the features required for interactive, convenient to use medical imaging software. MITK is an open-source project (www.mitk.org).}, keywords = {Algorithms, Artificial Intelligence, Computer Graphics, Diagnostic Imaging, Image Enhancement, Image Interpretation, Computer-Assisted, Imaging, Three-Dimensional, Pattern Recognition, Automated, Software, User-Computer Interface}, issn = {1361-8415}, author = {Wolf, Ivo and Vetter, Marcus and Wegner, Ingmar and B{\"o}ttger, Thomas and Nolden, Marco and Sch{\"o}binger, Max and Hastenteufel, Mark and Kunert, Tobias and Meinzer, Hans-Peter} } @article {500, title = {Advances in functional and structural MR image analysis and implementation as FSL.}, journal = {NeuroImage}, volume = {23 Suppl 1}, year = {2004}, month = {2004}, pages = {S208-19}, abstract = {The techniques available for the interrogation and analysis of neuroimaging data have a large influence in determining the flexibility, sensitivity, and scope of neuroimaging experiments. The development of such methodologies has allowed investigators to address scientific questions that could not previously be answered and, as such, has become an important research area in its own right. In this paper, we present a review of the research carried out by the Analysis Group at the Oxford Centre for Functional MRI of the Brain (FMRIB). This research has focussed on the development of new methodologies for the analysis of both structural and functional magnetic resonance imaging data. 
The majority of the research laid out in this paper has been implemented as freely available software tools within FMRIB{\textquoteright}s Software Library (FSL).}, keywords = {Bayes Theorem, Brain, Databases, Factual, Humans, Image Processing, Computer-Assisted, Magnetic Resonance Imaging, Models, Neurological, Models, Statistical, Software}, issn = {1053-8119}, author = {Smith, Stephen M and Jenkinson, Mark and Woolrich, Mark W and Beckmann, Christian F and Behrens, Timothy E J and Johansen-Berg, Heidi and Bannister, Peter R and De Luca, Marilena and Drobnjak, Ivana and Flitney, David E and Niazy, Rami K and Saunders, James and Vickers, John and Zhang, Yongyue and De Stefano, Nicola and Brady, J Michael and Matthews, Paul M} } @article {15361001, title = {The RODS Open Source Project: removing a barrier to syndromic surveillance.}, journal = {Studies in health technology and informatics}, volume = {107}, year = {2004}, month = {2004}, pages = {1192-6}, abstract = {The goal of the Real-time Outbreak and Disease Surveillance (RODS) Open Source Project is to accelerate deployment of computer-based syndromic surveillance. To this end, the project has released the RODS software under the GNU General Public License and created an organizational structure to catalyze its development. This paper describes the design of the software, requested extensions, and the structure of the development effort.}, author = {Espino, Jeremy U and Wagner, Michael M and Tsui, Fu-Chang and Su, Hoah-Der and Olszewski, Robert T and Lie, Zhen and Chapman, Wendy and Zeng, Xiaoming and Ma, Lili and Lu, Zhong Wei and Dara, Jagan} } @article {12810121, title = {Open Source software in medical informatics--why, how and what.}, journal = {International journal of medical informatics}, volume = {69}, year = {2003}, month = {2003 Mar}, pages = {175-84}, abstract = {{\textquoteright}Open Source{\textquoteright} is a 20-40 year old approach to licensing and distributing software that has recently burst into public view. Against conventional wisdom this approach has been wildly successful in the general software market--probably because the openness lets programmers the world over obtain, critique, use, and build upon the source code without licensing fees. Linux, a UNIX-like operating system, is the best known success. But computer scientists at the University of California, Berkeley began the tradition of software sharing in the mid 1970s with BSD UNIX and distributed the major internet network protocols as source code without a fee. Medical informatics has its own history of Open Source distribution: Massachusetts General{\textquoteright}s COSTAR and the Veterans Administration{\textquoteright}s VISTA software have been distributed as source code at no cost for decades. Bioinformatics, our sister field, has embraced the Open Source movement and developed rich libraries of open-source software. Open Source has now gained a tiny foothold in health care (OSCAR GEHR, OpenEMed). Medical informatics researchers and funding agencies should support and nurture this movement. In a world where open-source modules were integrated into operational health care systems, informatics researchers would have real world niches into which they could engraft and test their software inventions. This could produce a burst of innovation that would help solve the many problems of the health care system. 
We at the Regenstrief Institute are doing our part by moving all of our development to the open-source model.}, author = {McDonald, Clement J and Schadow, Gunther and Barnes, Michael and Dexter, Paul and Overhage, J Marc and Mamlin, Burke and McCoy, J Michael} }