Merge branch 'master' into issue-1113

prb112 committed May 26, 2020
2 parents bff55ed + 85e7102 commit ba3c1ad

Showing 150 changed files with 17,515 additions and 7,834 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -54,6 +54,7 @@ To use the artifacts from a Maven project:
|------|-----------|----------|
|fhir-model|An object model generated from the FHIR R4 specification and corresponding parsers and generators for XML and JSON|true|
|fhir-registry|A resource registry, registry provider interfaces, and pre-registered resources shipped with the FHIR specification|false|
|fhir-term|A terminology service provider interface with a default implementation that implements the ValueSet expand operation|false|
|fhir-profile|Helper methods for validating ValueSet membership and Profile conformance|false|
|fhir-path|An implementation of version 2.0.0 of the FHIRPath specification assumed by FHIR R4|false|
|fhir-validation|Validation utility for validating resource instances against the base specification and/or configured profiles|false|
44 changes: 42 additions & 2 deletions build/docker/deploySchemaAndTenant.sh
@@ -5,13 +5,53 @@
# SPDX-License-Identifier: Apache-2.0
###############################################################################
set -ex
set +o pipefail
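# pipefail stays off so the java | tee pipelines below do not abort the script under 'set -e';
# the java exit status is read explicitly from PIPESTATUS[0] instead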

# The full path to the directory of this script, no matter where its called from
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd ${DIR}

java -jar schema/fhir-persistence-schema-*-cli.jar \
--prop-file db2.properties --schema-name FHIRDATA --create-schemas
# Makes a temp file to store the output
TMP_FILE=`mktemp`

# Retry the schema creation up to 4 times while waiting for the database
not_ready=true
retry_count=0
while [ "$not_ready" == "true" ]
do
EXIT_CODE="-1"
java -jar schema/fhir-persistence-schema-*-cli.jar \
--prop-file db2.properties --schema-name FHIRDATA --create-schemas | tee -a ${TMP_FILE}
EXIT_CODE="${PIPESTATUS[0]}"
LOG_OUT=`cat ${TMP_FILE}`
if [ "$EXIT_CODE" == "0" ]
then
# We now just send out the output and stop the loop
echo "${LOG_OUT}"
not_ready="false"
elif [ "$EXIT_CODE" == "4" ] || [ echo "$LOG_OUT" | grep -q "SQLCODE=-1035, SQLSTATE=57019" ]
then
# EXIT_NOT_READY = 4 - in certain versions this indicates the operation should automatically be retried
retry_count=$((retry_count+1))
if [ $retry_count -lt 4 ]
then
echo "Waiting for the Database to be ready - Sleeping"
sleep 60
else
echo "Reached Limit while waiting"
echo "$LOG_OUT"
exit "$EXIT_CODE"
fi
else
# Unexpected failure - surface the log and fail with the java exit code
echo "$LOG_OUT"
exit "$EXIT_CODE"
fi
done

if [ -f ${TMP_FILE} ]
then
rm ${TMP_FILE}
fi

java -jar schema/fhir-persistence-schema-*-cli.jar \
--prop-file db2.properties --schema-name FHIRDATA --update-schema --pool-size 2
14 changes: 13 additions & 1 deletion build/release/build.sh
@@ -45,18 +45,30 @@ function _mvn {
check_and_fail $? "${FUNCNAME[0]} - stopped - ${PROJECT_PATH}"
}

# _mvn2 - runs mvn install (skipping tests) to prep the build
function _mvn2 {
announce "${FUNCNAME[0]}"
PROJECT_PATH="$1"
PROFILES="$2"

# Batch mode (-B) with no transfer progress output (-ntp); installs without running tests.
mvn ${THREAD_COUNT} -ntp -B "${PROFILES}" install -DskipTests -f ${PROJECT_PATH}
check_and_fail $? "${FUNCNAME[0]} - stopped - ${PROJECT_PATH}"
}

# build_all - build all versions
function build_all {
_mvn 'fhir-tools' '-Pdeploy-bintray,fhir-javadocs'
_mvn 'fhir-examples' '-Pdeploy-bintray,fhir-javadocs'
_mvn2 'fhir-parent' '-Pdeploy-bintray'

PROFILES_ARR=(integration)
PROFILES_ARR+=(model-all-tests)
PROFILES_ARR+=(validation-all-tests)
PROFILES_ARR+=(search-all-tests)
PROFILES_ARR+=(jdbc-all-tests)
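# Join the test profile array into a comma-separated list for the -P flag below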
PROFILES=$(IFS=, ; echo "${PROFILES_ARR[*]}")
_mvn 'fhir-parent' "-Pdeploy-bintray,fhir-javadocs,fhir-validation-distribution,fhir-ig-carin-bb,fhir-ig-davinci-pdex-plan-net,fhir-ig-mcode,fhir-ig-us-core,${PROFILES}"
_mvn 'fhir-parent' "-Pdeploy-bintray,fhir-javadocs,fhir-validation-distribution,fhir-ig-carin-bb,fhir-ig-davinci-pdex-plan-net,fhir-ig-mcode,fhir-ig-us-core,fhir-term,${PROFILES}"
}

###############################################################################
3 changes: 2 additions & 1 deletion build/release/release.sh
@@ -88,8 +88,9 @@ function deploy_via_curl {
done

# The zip file upload logic handles only fhir-validation-distribution.zip and fhir-cli.zip
for ZIP_FILE in `find ${PROJ}/target -name 'fhir-validation-distribution.zip' -or -name 'fhir-cli.zip' -maxdepth 1 -exec basename {} \;`
for ZIP_FILE in `find ${PROJ}/target -name fhir-validation-distribution.zip -or -name fhir-cli.zip -maxdepth 1`
do
ZIP_FILE=`basename ${ZIP_FILE}`
echo " - Uploading zip: ${ZIP_FILE}"
FILE_TARGET_PATH="/com/ibm/fhir/${PROJ}/${BUILD_VERSION}/${ZIP_FILE}"
STATUS=$(curl -T "${PROJ}/target/${ZIP_FILE}" -u${BINTRAY_USERNAME}:${BINTRAY_PASSWORD} -H "X-Bintray-Package:${PROJ}" -H "X-Bintray-Version:${BUILD_VERSION}" https://api.bintray.com/content/ibm-watson-health/ibm-fhir-server-${TYPE}${FILE_TARGET_PATH} -o /dev/null -w '%{http_code}')
1 change: 1 addition & 0 deletions docs/src/pages/guides/CreateFHIRValidationAssembly.md
@@ -84,6 +84,7 @@ Archive: ./tmp-fhir4/FHIR/fhir-validation/target/fhir-validation-distribution.z
- fhir-ig-davinci-pdex-plan-net
- fhir-ig-mcode
- fhir-ig-us-core
- fhir-term - include the terminology module
- `fhir-ig-user-defined` - A user defined profile

If you chose to add the user defined profile, you must pass in the name of the dependency using a commandline parameter `-Dfhir-ig-user-defined=fhir-ig-example`.
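For illustration only, such a build might look like the following sketch; the profile names are taken from `build/release/build.sh` above, and `fhir-ig-example` stands in for your own IG module:

```sh
# Sketch: build the validation assembly with a user-defined IG module included.
# Profile names mirror build/release/build.sh; adjust them for your environment.
mvn -B -ntp -Pfhir-validation-distribution,fhir-ig-us-core,fhir-term \
    -Dfhir-ig-user-defined=fhir-ig-example \
    install -DskipTests -f fhir-parent
```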
8 changes: 7 additions & 1 deletion docs/src/pages/guides/FHIRServerUsersGuide.md
@@ -624,7 +624,7 @@ To implement a persistence interceptor, complete the following steps:
`com.ibm.mysolution.MyInterceptor`
3. Copy your jar to the `<WLP_HOME>/usr/servers/fhir-server/config` directory so that it is accessible to the FHIR server via the classpath (the `server.xml` file contains a library element that defines this directory as a shared library).
3. Copy your jar to the `<WLP_HOME>/usr/servers/fhir-server/userlib` directory so that it is accessible to the FHIR server via the classpath (the `server.xml` file contains a library element that defines this directory as a shared library).
4. Re-start the FHIR server.
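As a minimal sketch of steps 3 and 4, assuming a Liberty install at `<WLP_HOME>` and a hypothetical `my-interceptor.jar`:

```sh
# Placeholders only: <WLP_HOME> and my-interceptor.jar depend on your installation.
cp my-interceptor.jar <WLP_HOME>/usr/servers/fhir-server/userlib/
<WLP_HOME>/bin/server stop fhir-server
<WLP_HOME>/bin/server start fhir-server
```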
@@ -1486,6 +1486,8 @@ This section contains reference information about each of the configuration prop
|`fhirServer/bulkdata/validBaseUrls`|string|The list of supported urls which are approved for the fhir server to access|
|`fhirServer/bulkdata/validBaseUrlsDisabled`|boolean|Disables the URL checking feature|
|`fhirServer/bulkdata/maxInputPerRequest`|integer|The maximum inputs per bulk import|
|`fhirServer/bulkdata/cosFileMaxResources`|integer|The maximum number of FHIR resources per COS file; "-1" means no limit; the default is 500000|
|`fhirServer/bulkdata/cosFileMaxSize`|integer|The maximum COS file size in bytes; "-1" means no limit; the default is 209715200 (200 MB)|
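As a sketch, these limits live under `fhirServer/bulkdata` in the server configuration; the file name `fhir-server-config.json` and the use of `jq` here are assumptions for illustration:

```sh
# Sketch: write the documented defaults into the bulkdata section of the config.
jq '.fhirServer.bulkdata.cosFileMaxResources = 500000
    | .fhirServer.bulkdata.cosFileMaxSize = 209715200' \
  fhir-server-config.json > fhir-server-config.json.tmp \
  && mv fhir-server-config.json.tmp fhir-server-config.json
```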


### 5.1.2 Default property values
@@ -1532,6 +1534,8 @@ This section contains reference information about each of the configuration prop
|`fhirServer/audit/serviceProperties/geoCounty`|US|
|`fhirServer/bulkdata/isExportPublic`|true|
|`fhirServer/bulkdata/validBaseUrlsDisabled`|false|
|`fhirServer/bulkdata/cosFileMaxResources`|500000|
|`fhirServer/bulkdata/cosFileMaxSize`|209715200|


### 5.1.3 Property attributes
@@ -1596,6 +1600,8 @@ must restart the server for that change to take effect.
|`fhirServer/bulkdata/validBaseUrls`|Y|Y|
|`fhirServer/bulkdata/maxInputPerRequest`|Y|Y|
|`fhirServer/bulkdata/validBaseUrlsDisabled`|Y|Y|
|`fhirServer/bulkdata/cosFileMaxResources`|N|Y|
|`fhirServer/bulkdata/cosFileMaxSize`|N|Y|

## 5.2 Keystores, truststores, and the FHIR server

2 changes: 1 addition & 1 deletion docs/src/pages/guides/FHIRValidationGuide.md
@@ -32,7 +32,7 @@ Given a FHIR profile (structure definition) as input, the IBM FHIR Server Profil

- Cardinality constraints (required and prohibited elements)
- Fixed value constraints (Code and Uri data types)
- Pattern value constraints (CodeableConcept daa type)
- Pattern value constraints (CodeableConcept and Identifier data types)
- Reference type constraints (FHIRPath resolve/is/conformsTo functions)
- Extension constraints (FHIRPath `conformsTo` function)
- Vocabulary constraints (FHIRPath `memberOf` function)
4 changes: 4 additions & 0 deletions fhir-bulkimportexport-webapp/pom.xml
@@ -33,6 +33,10 @@
<groupId>org.glassfish</groupId>
<artifactId>jakarta.json</artifactId>
</dependency>
<dependency>
<groupId>jakarta.enterprise</groupId>
<artifactId>jakarta.enterprise.cdi-api</artifactId>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>fhir-persistence</artifactId>
@@ -1,19 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<job xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://xmlns.jcp.org/xml/ns/javaee" xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/jobXML_1_0.xsd" id="bulkexportchunkjob" restartable="true" version="1.0">
<properties>
<property name="cos.pagesperobject" value="#{jobParameters['cos.pagesperobject']}?:10;" />
</properties>
<listeners>
<listener ref="com.ibm.fhir.bulkexport.system.ExportJobListener"/>
</listeners>
<step id="step1">
<chunk checkpoint-policy="custom" item-count="#{jobProperties['cos.pagesperobject']}">
<chunk checkpoint-policy="custom">
<reader ref="com.ibm.fhir.bulkexport.system.ChunkReader">
<properties >
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
<property name="fhir.tenant" value="#{jobParameters['fhir.tenant']}"/>
<property name="fhir.datastoreid" value="#{jobParameters['fhir.datastoreid']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="fhir.search.fromdate" value="#{jobParameters['fhir.search.fromdate']}"/>
<property name="fhir.search.todate" value="#{jobParameters['fhir.search.todate']}"/>
<property name="fhir.search.pagesize" value="#{jobParameters['fhir.search.pagesize']}"/>
<property name="cos.bucket.objectname" value="#{jobParameters['cos.bucket.objectname']}"/>
<property name="fhir.typeFilters" value="#{jobParameters['fhir.typeFilters']}"/>
</properties>
</reader>
@@ -26,16 +25,24 @@
<property name="cos.credential.ibm" value="#{jobParameters['cos.credential.ibm']}"/>
<property name="cos.bucket.name" value="#{jobParameters['cos.bucket.name']}"/>
<property name="cos.bucket.pathprefix" value="#{jobParameters['cos.bucket.pathprefix']}"/>
<property name="cos.bucket.objectname" value="#{jobParameters['cos.bucket.objectname']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
</properties>
</writer>
<checkpoint-algorithm ref="com.ibm.fhir.bulkexport.common.CheckPointAlgorithm">
<properties>
<property name="cos.pagesperobject" value="#{jobParameters['cos.pagesperobject']}"/>
<property name="cos.bucket.maxfilesize" value="#{jobParameters['cos.bucket.maxfilesize']}"/>
<property name="cos.bucket.filemaxsize" value="#{jobParameters['cos.bucket.filemaxsize']}"/>
<property name="cos.bucket.filemaxresources" value="#{jobParameters['cos.bucket.filemaxresources']}"/>
</properties>
</checkpoint-algorithm>
</chunk>
<partition>
<mapper ref="com.ibm.fhir.bulkexport.system.SystemExportPartitionMapper">
<properties>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
</properties>
</mapper>
<collector ref="com.ibm.fhir.bulkexport.system.ExportPartitionCollector"/>
<analyzer ref="com.ibm.fhir.bulkexport.system.ExportPartitionAnalyzer"/>
</partition>
</step>
</job>
@@ -1,20 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<job xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://xmlns.jcp.org/xml/ns/javaee" xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/jobXML_1_0.xsd" id="bulkgroupexportchunkjob" restartable="true" version="1.0">
<listeners>
<listener ref="com.ibm.fhir.bulkexport.system.ExportJobListener"/>
</listeners>
<step id="step1">
<chunk checkpoint-policy="item" item-count="1">
<chunk checkpoint-policy="custom">
<reader ref="com.ibm.fhir.bulkexport.group.ChunkReader">
<properties >
<property name="fhir.tenant" value="#{jobParameters['fhir.tenant']}"/>
<property name="fhir.datastoreid" value="#{jobParameters['fhir.datastoreid']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
<property name="fhir.search.fromdate" value="#{jobParameters['fhir.search.fromdate']}"/>
<property name="fhir.search.todate" value="#{jobParameters['fhir.search.todate']}"/>
<property name="fhir.search.pagesize" value="#{jobParameters['fhir.search.pagesize']}"/>
<property name="fhir.search.patientgroupid" value="#{jobParameters['fhir.search.patientgroupid']}"/>
<property name="fhir.typeFilters" value="#{jobParameters['fhir.typeFilters']}"/>
</properties>
</reader>
<writer ref="com.ibm.fhir.bulkexport.patient.ChunkWriter">
<writer ref="com.ibm.fhir.bulkexport.system.ChunkWriter">
<properties>
<property name="cos.api.key" value="#{jobParameters['cos.api.key']}"/>
<property name="cos.srvinst.id" value="#{jobParameters['cos.srvinst.id']}"/>
@@ -23,9 +26,24 @@
<property name="cos.credential.ibm" value="#{jobParameters['cos.credential.ibm']}"/>
<property name="cos.bucket.name" value="#{jobParameters['cos.bucket.name']}"/>
<property name="cos.bucket.pathprefix" value="#{jobParameters['cos.bucket.pathprefix']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
</properties>
</writer>
<checkpoint-algorithm ref="com.ibm.fhir.bulkexport.common.CheckPointAlgorithm">
<properties>
<property name="cos.bucket.filemaxsize" value="#{jobParameters['cos.bucket.filemaxsize']}"/>
<property name="cos.bucket.filemaxresources" value="#{jobParameters['cos.bucket.filemaxresources']}"/>
</properties>
</checkpoint-algorithm>
</chunk>
<partition>
<mapper ref="com.ibm.fhir.bulkexport.patient.PatientExportPartitionMapper">
<properties>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
</properties>
</mapper>
<collector ref="com.ibm.fhir.bulkexport.system.ExportPartitionCollector"/>
<analyzer ref="com.ibm.fhir.bulkexport.system.ExportPartitionAnalyzer"/>
</partition>
</step>
</job>
@@ -1,22 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<job xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://xmlns.jcp.org/xml/ns/javaee" xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee http://xmlns.jcp.org/xml/ns/javaee/jobXML_1_0.xsd" id="bulkpatientexportchunkjob" restartable="true" version="1.0">
<properties>
<property name="cos.pagesperobject" value="#{jobParameters['cos.pagesperobject']}?:10;" />
</properties>
<listeners>
<listener ref="com.ibm.fhir.bulkexport.system.ExportJobListener"/>
</listeners>
<step id="step1">
<chunk checkpoint-policy="custom" item-count="#{jobProperties['cos.pagesperobject']}">
<chunk checkpoint-policy="custom">
<reader ref="com.ibm.fhir.bulkexport.patient.ChunkReader">
<properties >
<property name="fhir.tenant" value="#{jobParameters['fhir.tenant']}"/>
<property name="fhir.datastoreid" value="#{jobParameters['fhir.datastoreid']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
<property name="fhir.search.fromdate" value="#{jobParameters['fhir.search.fromdate']}"/>
<property name="fhir.search.todate" value="#{jobParameters['fhir.search.todate']}"/>
<property name="fhir.search.pagesize" value="#{jobParameters['fhir.search.pagesize']}"/>
<property name="fhir.typeFilters" value="#{jobParameters['fhir.typeFilters']}"/>
</properties>
</reader>
<writer ref="com.ibm.fhir.bulkexport.patient.ChunkWriter">
<writer ref="com.ibm.fhir.bulkexport.system.ChunkWriter">
<properties>
<property name="cos.api.key" value="#{jobParameters['cos.api.key']}"/>
<property name="cos.srvinst.id" value="#{jobParameters['cos.srvinst.id']}"/>
@@ -25,15 +25,24 @@
<property name="cos.credential.ibm" value="#{jobParameters['cos.credential.ibm']}"/>
<property name="cos.bucket.name" value="#{jobParameters['cos.bucket.name']}"/>
<property name="cos.bucket.pathprefix" value="#{jobParameters['cos.bucket.pathprefix']}"/>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
<property name="partition.resourcetype" value="#{partitionPlan['partition.resourcetype']}"/>
</properties>
</writer>
<checkpoint-algorithm ref="com.ibm.fhir.bulkexport.common.CheckPointAlgorithm">
<properties>
<property name="cos.pagesperobject" value="#{jobParameters['cos.pagesperobject']}"/>
<property name="cos.bucket.maxfilesize" value="#{jobParameters['cos.bucket.maxfilesize']}"/>
<property name="cos.bucket.filemaxsize" value="#{jobParameters['cos.bucket.filemaxsize']}"/>
<property name="cos.bucket.filemaxresources" value="#{jobParameters['cos.bucket.filemaxresources']}"/>
</properties>
</checkpoint-algorithm>
</chunk>
<partition>
<mapper ref="com.ibm.fhir.bulkexport.patient.PatientExportPartitionMapper">
<properties>
<property name="fhir.resourcetype" value="#{jobParameters['fhir.resourcetype']}"/>
</properties>
</mapper>
<collector ref="com.ibm.fhir.bulkexport.system.ExportPartitionCollector"/>
<analyzer ref="com.ibm.fhir.bulkexport.system.ExportPartitionAnalyzer"/>
</partition>
</step>
</job>