@@ -36,10 +36,25 @@
.DS_Store
# IDEs
.idea
.idea/
cmake-build-*
.kdev4
.vscode
# Temporary structure
temporary*/
# Source
Documentation/build
# SuperBench output
TransformParameters.*
registration_driver.log
# Dropbox sync
.dropbox.attr
# Images
*.nii.gz
*.mhd
*.raw
\ No newline at end of file
language: cpp
language:
- cpp
- python
env:
global:
@@ -30,84 +32,89 @@ matrix:
compiler: gcc
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.9
env:
- GCC="CC=gcc-4.9 && CXX=g++-4.9"
sources: ubuntu-toolchain-r-test
packages: [g++-5, python3, python3-pip, libinsighttoolkit4*]
env: PYTHON=3.5 GCC="CC=gcc-5 && CXX=g++-5"
- os: linux
dist: trusty
sudo: required
compiler: gcc
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- GCC="CC=gcc-5 && CXX=g++-5"
sources: ubuntu-toolchain-r-test
packages: [g++-6, python3, python3-pip, libinsighttoolkit4*]
env: PYTHON=3.5 GCC="CC=gcc-6 && CXX=g++-6"
- os: linux
dist: trusty
sudo: required
compiler: gcc
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-6
env:
- GCC="CC=gcc-6 && CXX=g++-6"
- os: linux
dist: trusty
sudo: required
compiler: gcc
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
env:
- GCC="CC=gcc-7 && CXX=g++-7"
sources: ubuntu-toolchain-r-test
packages: [g++-7, python3, python3-pip, libinsighttoolkit4*]
env: PYTHON=3.5 GCC="CC=gcc-7 && CXX=g++-7"
- os: osx
compiler: clang
osx_image: xcode8.3
env: PYTHON=3.5 CPP=14 CLANG
- os: osx
compiler: gcc
osx_image: xcode8.3
env: PYTHON=3.5 CPP=14 CLANG
before_install:
- root=$PWD
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then eval ${GCC}; fi;
before_script:
- mkdir -p ${SUPERELASTIX_BUILD_DIR}
- mkdir -p ${ExternalData_OBJECT_STORES}
- if [[ "$TRAVIS_OS_NAME" == "linux" && ! -e "${ExternalData_OBJECT_STORES}/${CMAKE_DOWNLOAD_FILE}" ]]; then (cd ${ExternalData_OBJECT_STORES} && curl -sSO https://cmake.org/files/v3.6/${CMAKE_DOWNLOAD_FILE}); fi;
- if [[ "$TRAVIS_OS_NAME" == "linux" && ! -e "${ExternalData_OBJECT_STORES}/${CMAKE_DOWNLOAD_FILE}" ]]; then (cd ${ExternalData_OBJECT_STORES} && curl -sSO https://cmake.org/files/v3.6/${CMAKE_DOWNLOAD_FILE}); fi;
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo bash "${ExternalData_OBJECT_STORES}/${CMAKE_DOWNLOAD_FILE}" --prefix=/usr/local --exclude-subdir; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then cmake --version; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
brew upgrade python &&
curl https://bootstrap.pypa.io/get-pip.py | python3 &&
brew list python | grep pip; fi
script:
install:
- if [[ ! -e ${ITK_BUILD_DIR}/ITKConfig.cmake ]]; then
rm -rf ${ITK_SOURCE_DIR} ${ITK_BUILD_DIR} &&
git clone ${ITK_URL} ${ITK_SOURCE_DIR} &&
cd ${ITK_SOURCE_DIR} &&
git checkout v4.12.2 &&
git checkout v4.13.0 &&
mkdir -p ${ITK_BUILD_DIR} &&
cd ${ITK_BUILD_DIR} &&
cmake -DBUILD_EXAMPLES=OFF -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release -DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON -DCMAKE_VISIBILITY_INLINES_HIDDEN:BOOL=ON -DBUILD_SHARED_LIBS:BOOL=OFF -DCMAKE_SKIP_RPATH:BOOL=ON -DITK_LEGACY_REMOVE:BOOL=ON -DITK_USE_KWSTYLE:BOOL=OFF -DITK_BUILD_DEFAULT_MODULES:BOOL=ON -DModule_ITKReview:BOOL=ON ${ITK_SOURCE_DIR} &&
make --jobs=4; fi
- if [[ ! -e ${ELASTIX_BUILD_DIR}/ElastixConfig.cmake ]]; then
make --jobs=2 &&
touch ${HOME_DIR}/built_cache; fi
- if [[ ! -e ${ELASTIX_BUILD_DIR}/ElastixConfig.cmake && ! -e ${HOME_DIR}/built_cache ]]; then
rm -rf ${ELASTIX_SOURCE_DIR} ${ELASTIX_BUILD_DIR} &&
git clone ${ELASTIX_URL} ${ELASTIX_SOURCE_DIR} &&
cd ${ELASTIX_SOURCE_DIR} &&
git checkout e8354222396c6bd78e9fe6d5a39e4ecac4bb0d52 &&
git checkout 0ce6c24a2b74c0132f1a53848321f1c7a92162ea &&
mkdir -p ${ELASTIX_BUILD_DIR} &&
cd ${ELASTIX_BUILD_DIR} &&
cmake -DBUILD_TESTING:BOOL=OFF -DELASTIX_BUILD_EXECUTABLE:BOOL=OFF -DITK_DIR=${ITK_BUILD_DIR} -DCMAKE_BUILD_TYPE=Release ${ELASTIX_SOURCE_DIR} &&
make --jobs=4 &&
make --jobs=2 &&
touch ${HOME_DIR}/built_cache; fi
- cd ${SUPERELASTIX_BUILD_DIR}
- if [[ ! -e ${HOME_DIR}/built_cache ]]; then
cmake -DBUILD_APPLICATIONS_EXTERNAL_PROJECT:BOOL=ON -DBUILD_TESTING:BOOL=ON -DCMAKE_BUILD_TYPE=Release -DUSE_SYSTEM_ITK:BOOL=ON -DITK_DIR=${ITK_BUILD_DIR} -DUSE_SYSTEM_ELASTIX:BOOL=ON -DElastix_DIR=${ELASTIX_BUILD_DIR} ${SUPERELASTIX_SOURCE_DIR} &&
make --jobs=4; fi
make --jobs=2; fi
#before_script:
# - cd $root
# - mkdir temp
# - python3 --version && pip3 --version
# - sudo pip3 install -U pip
# - pip3 install --user -r requirements.txt
# - sudo pip3 install --ignore-installed nose pytest pytest-cov coverage codecov codacy-coverage
script: sleep 1 # disable default call ./configure && make && make test
# - nosetests Tools -v --exe --with-doctest --with-xunit --with-coverage --cover-package=Tools
# - nosetests ContinuousRegistration -v --exe --with-doctest --with-xunit --with-coverage --cover-package=ContinuousRegistration --ignore-files=submit_jobs.py
# - py.test Tools -v --doctest-modules
# - py.test ContinuousRegistration -v --doctest-modules --ignore=ContinuousRegistration/Source/submit_jobs.py
# - coverage run --source Tools -m py.test Tools -v --doctest-modules
# - coverage run --source ContinuousRegistration -m py.test ContinuousRegistration -v --doctest-modules --ignore=ContinuousRegistration/Source/submit_jobs.py
# - coverage report
# - codecov
\ No newline at end of file
@@ -35,7 +35,10 @@ find_package(Boost COMPONENTS program_options filesystem system regex REQUIRED Q
include_directories( ${Boost_INCLUDE_DIR} )
# Compile executable
include_directories( "${CMAKE_CURRENT_SOURCE_DIR}/include" )
include_directories(
"${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_BINARY_DIR}/Applications" # For selxGitRevisionSha.h
)
add_executable( SuperElastix ${COMMANDLINE_SOURCE_FILES} ${COMMANDLINE_HEADER_FILES} )
target_link_libraries( SuperElastix ${SUPERELASTIX_LIBRARIES} ${Boost_LIBRARIES} ${ITK_LIBRARIES} ${ELASTIX_LIBRARIES} )
../SuperElastix --conf ../Configuration/elastix_Bspline_NC.json --graphout 1B_graph_elastix_NC.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=1A_image_elastix_NC.mhd
../SuperElastix --conf ../Configuration/elastix_Bspline_NC.json --graphout 1B_graph_elastix_NC.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=1A_image_elastix_NC.mhd --loglevel trace
../SuperElastix --conf ../Configuration/elastix_Bspline_MSD.json --graphout 1B_graph_elastix_MSD.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=1B_image_elastix_MSD.mhd
../SuperElastix --conf ../Configuration/elastix_Bspline_MSD.json --graphout 1B_graph_elastix_MSD.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=1B_image_elastix_MSD.mhd --loglevel trace
../SuperElastix --conf ../Configuration/itkv4_SVF_ANTsCC.json --graphout 2A_graph_itkv4_NC.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=2A_image_itkv4_NC.mhd ResultDisplacementField=2A_deformation_itkv4_NC.mhd
../SuperElastix --conf ../Configuration/itkv4_SVF_ANTsCC.json --graphout 2A_graph_itkv4_NC.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=2A_image_itkv4_NC.mhd ResultDisplacementField=2A_deformation_itkv4_NC.mhd --loglevel trace
../SuperElastix --conf ../Configuration/itkv4_SVF_MSD.json --graphout 2B_graph_itkv4_MSD.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=2B_image_itkv4_MSD.mhd ResultDisplacementField=2B_deformation_itkv4_MSD.mhd
../SuperElastix --conf ../Configuration/itkv4_SVF_MSD.json --graphout 2B_graph_itkv4_MSD.dot --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out ResultImage=2B_image_itkv4_MSD.mhd ResultDisplacementField=2B_deformation_itkv4_MSD.mhd --loglevel trace
#!/bin/bash
../SuperElastix --conf ../Configuration/IdentityTransformRegistration.json --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out DisplacementField=IdentityDeformationField.mhd
../SuperElastix --conf ../Configuration/IdentityTransformRegistration.json --in FixedImage=../Data/coneA2d64.mhd MovingImage=../Data/coneB2d64.mhd --out DisplacementField=IdentityDeformationField.mhd --loglevel trace
# data from Namic - Deformable registration speed optimization http://www.insight-journal.org/midas/collection/view/29
../SuperElastix --conf ../Configuration/elastix3d.json --loglevel debug --in FixedImage=N012_S03_tof3d_multi_slab.mha MovingImage=N026_S02_tof3d_multi_slab.mha --out ResultImage=elastix3d_warped.mhd ResultDisplacementField=elastix3d_displacement.mhd
../SuperElastix --conf ../Configuration/elastix3d.json --loglevel trace --in FixedImage=N012_S03_tof3d_multi_slab.mha MovingImage=N026_S02_tof3d_multi_slab.mha --out ResultImage=elastix3d_warped.mhd ResultDisplacementField=elastix3d_displacement.mhd
@@ -21,6 +21,7 @@
#include "selxAnyFileReader.h"
#include "selxAnyFileWriter.h"
#include "selxLogger.h"
#include "selxGitInfo.h"
#include <boost/algorithm/string.hpp>
#include <boost/filesystem.hpp>
@@ -93,6 +94,7 @@ main( int ac, char * av[] )
boost::program_options::options_description desc("Allowed options");
desc.add_options()
( "help", "produce help message" )
( "revision-sha", "produce git revision SHA-1 hash of SuperElastix source" )
("conf", boost::program_options::value< VectorOfPathsType >(&configurationPaths)->required()->multitoken(), "Configuration file: single or multiple Blueprints [.xml|.json]")
("in", boost::program_options::value< VectorOfStringsType >(&inputPairs)->multitoken(), "Input data: images, labels, meshes, etc. Usage arg: <name>=<path> (or multiple pairs)")
("out", boost::program_options::value< VectorOfStringsType >(&outputPairs)->multitoken(), "Output data: images, labels, meshes, etc. Usage arg: <name>=<path> (or multiple pairs)")
@@ -105,7 +107,14 @@ main( int ac, char * av[] )
if( vm.count( "help" ) )
{
std::cout << desc << "\n";
std::cout
<< "SuperElastix GIT Revision SHA-1: " << selx::GitInfo::GetRevisionSha() << '\n'
<< desc << std::endl;
return 0;
}
if (vm.count("revision-sha"))
{
std::cout << selx::GitInfo::GetRevisionSha() << std::endl;
return 0;
}
boost::program_options::notify(vm);
@@ -17,22 +17,80 @@
#
#=========================================================================
add_integration_test(
NAME SuperElastixIntegrationTest
DRIVER SuperElastix
ARGUMENTS --conf ${CMAKE_SOURCE_DIR}/Testing/Data/Configuration/itkv4_SVF_ANTsCC.json --in FixedImage=${CMAKE_BINARY_DIR}/Testing/Data/Input/coneA2d64.mhd MovingImage=${CMAKE_BINARY_DIR}/Testing/Data/Input/coneB2d64.mhd --out ResultImage=2A_image_itkv4_NC.mhd ResultDisplacementField=2A_deformation_itkv4_NC.mhd
)
# Integration tests are "short" tests that typically test a combination of components by using the SuperElastix commandline interface with a configuration file.
# Possible grand challenge CMake interface:
#
# add_grand_challenge_submission(
# NAME username
# DRIVER GrandChallengeDriver
# ARGUMENTS ...
# )
#
# The CMake mechanism would be similar to that of integration tests, except
# we would allow users to write their own driver and/or use an executable that
# contains additional functionality for saving results. The macro would also
# invoke a script that uploads results and configuration files to the website.
#Application Demo 1A: should match <source>/Applications/CommandLineInterface/Demo/Scripts_[Linux|Windows]/
add_test(NAME Integration_Demo_1A COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1A.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/elastix_Bspline_NC.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1A.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1A_image_elastix_NC.mhd)
#Application Demo 1B: should match <source>/Applications/CommandLineInterface/Demo/Scripts_[Linux|Windows]/
add_test(NAME Integration_Demo_1B COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1B.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/elastix_Bspline_MSD.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1B.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_1B_image_elastix_MSD.mhd)
#Application Demo 2A: should match <source>/Applications/CommandLineInterface/Demo/Scripts_[Linux|Windows]/
add_test(NAME Integration_Demo_2A COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2A.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itkv4_SVF_ANTsCC.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2A.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2A_image_itkv4_NC.mhd
ResultDisplacementField=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2A_deformation_itkv4_NC.mhd)
#Application Demo 2B: should match <source>/Applications/CommandLineInterface/Demo/Scripts_[Linux|Windows]/
add_test(NAME Integration_Demo_2B COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2B.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itkv4_SVF_MSD.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2B.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2B_image_itkv4_MSD.mhd
ResultDisplacementField=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_Demo_2B_deformation_itkv4_MSD.mhd)
add_test(NAME Integration_WarpByItkTransform COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_WarpByItkTransform.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itk_warper.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_WarpByItkTransform.dot
--in FixedAndMovingImageSource=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
TransformSource=${SUPERELASTIX_INPUT_DATA_DIR}/ItkAffine2Dtransform.tfm
--out ResultImageSink=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_WarpByItkTransform.mhd)
add_test(NAME Integration_ComposeBlueprintElastix COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintElastix.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/elastix_Base.json
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/elastix_Blueprint_Bspline_MSD.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintElastix.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintElastix.mhd)
add_test(NAME Integration_ComposeBlueprintItk COMMAND SuperElastix
--logfile ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintItk.log
--loglevel trace
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itkv4_Base.json
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itkv4_Affine_MSD.json
--conf ${SUPERELASTIX_CONFIGURATION_DATA_DIR}/itk_TransformSink.json
--graphout ${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintItk.dot
--in FixedImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneA2d64.mhd
MovingImage=${SUPERELASTIX_INPUT_DATA_DIR}/coneB2d64.mhd
--out ResultImage=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintItk.mhd
ResultDisplacementField=${SUPERELASTIX_OUTPUT_DATA_DIR}/Integration_ComposeBlueprintItk_def.mhd
ResultTransform=Integration_ComposeBlueprintItk.tfm)
\ No newline at end of file
@@ -51,7 +51,7 @@ macro( _selxapplications_initialize )
message( STATUS " ${APPLICATION}" )
option( "USE_${APPLICATION}" OFF )
option( "USE_${APPLICATION}" ON )
set( "${APPLICATION}_CMAKE_FILE" ${CMAKE_SOURCE_DIR}/${APPLICATION_CMAKE_FILE} )
set( "${APPLICATION}_IS_ENABLED" FALSE )
@@ -77,6 +77,17 @@ if( ${CMAKE_CXX_COMPILER_ID} STREQUAL MSVC )
"Please move the build directory to a directory with a shorter path."
)
endif()
# Avoid many warnings, originating mainly from NiftyReg
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
# Avoid many warnings, originating mainly from GoogleTest
add_definitions(-D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING)
if( ${CMAKE_CONFIGURATION_TYPES} STREQUAL "Debug" )
# For a Visual C++ Debug build, use 64-bit toolset, to avoid link errors.
set(CMAKE_GENERATOR_TOOLSET "host=x64" CACHE STRING "SuperElastix Visual C++ Debug uses 64-bit toolset" FORCE)
endif()
# Explicitly add INCREMENTAL linking option to command lines.
# http://www.cmake.org/pipermail/cmake/2010-February/035174.html
@@ -125,7 +136,7 @@ include_directories( ${Boost_INCLUDE_DIRS} )
# ---------------------------------------------------------------------
# SuperElastix configuration
option( BUILD_APPLICATIONS "Build applications." OFF )
option( BUILD_APPLICATIONS "Build applications." ON )
mark_as_advanced( BUILD_SHARED_LIBS )
option( BUILD_SHARED_LIBS "Build shared libraries." OFF )
@@ -141,7 +152,30 @@ _selxmodules_initialize()
enable_modules()
message( STATUS "Enabling modules ... Done" )
# Originally based on the answer by Naszta at
# https://stackoverflow.com/questions/6526451/how-to-include-git-commit-number-into-a-c-executable
find_package(Git)
if(GIT_FOUND)
execute_process(
COMMAND ${GIT_EXECUTABLE} rev-parse HEAD
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
OUTPUT_VARIABLE SELX_GIT_REVISION_SHA
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
else()
set(SELX_GIT_REVISION_SHA 0)
endif()
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Modules/Core/selxGitRevisionSha.h.in ${CMAKE_CURRENT_BINARY_DIR}/ModuleCore/selxGitRevisionSha.h @ONLY)
include( ${ModuleFilter_SOURCE_DIR}/CompiledLibraryComponents.cmake )
mark_as_advanced( COMPILED_LIBRARY_CONFIG_DIR )
set( COMPILED_LIBRARY_CONFIG_DIR ${PROJECT_BINARY_DIR} CACHE PATH "Path where a custom selxCompiledLibraryComponents.h can be found. Defaults to automatic generated file in ${PROJECT_BINARY_DIR}")
include_directories(${COMPILED_LIBRARY_CONFIG_DIR})
list( APPEND SUPERELASTIX_INCLUDE_DIRS ${COMPILED_LIBRARY_CONFIG_DIR} )
option( BUILD_UNIT_TESTS "Also build tests that take a long time to run." ON )
# Build applications
if( ${BUILD_APPLICATIONS} )
@@ -154,16 +188,6 @@ endif()
# TODO: Functionality to disable default modules/applications
# ---------------------------------------------------------------------
# SuperBench
option( BUILD_SUPERBENCH "Build SuperBench continuous challenge framework." OFF )
mark_as_advanced( BUILD_SUPERBENCH )
if( BUILD_SUPERBENCH )
add_subdirectory( SuperBench )
endif()
# ---------------------------------------------------------------------
# Testing
@@ -23,30 +23,45 @@ set(CTEST_BINARY_DIRECTORY ".")
set(CTEST_SITE "lkeb-selx01")
find_package( Git REQUIRED )
if(NOT DEFINED CTEST_GIT_COMMAND)
find_program(CTEST_GIT_COMMAND NAMES git git.cmd)
endif()
set(CTEST_GIT_UPDATE_CUSTOM "${CTEST_GIT_COMMAND}" pull origin)
execute_process (
COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD
WORKING_DIRECTORY ${CTEST_SOURCE_DIRECTORY}
OUTPUT_VARIABLE SELX_GIT_COMMIT_SHA
)
execute_process (
COMMAND ${GIT_EXECUTABLE} name-rev --name-only HEAD
WORKING_DIRECTORY ${CTEST_SOURCE_DIRECTORY}
OUTPUT_VARIABLE SELX_GIT_BRANCH_NAME
)
if ("$ENV{CHANGE_AUTHOR}" STREQUAL "")
set(CTEST_BUILD_NAME "$ENV{BRANCH_NAME}")
else()
set(CTEST_BUILD_NAME "$ENV{BRANCH_NAME};$ENV{CHANGE_AUTHOR}")
endif()
set(CTEST_BUILD_NAME "${CTEST_BUILD_NAME};Test")
set(CTEST_BUILD_NAME "${SELX_GIT_BRANCH_NAME};Tests;commit=SHA\\:${SELX_GIT_COMMIT_SHA}")
set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
set(CTEST_BUILD_CONFIGURATION Release)
set(CTEST_BUILD_FLAGS "-j2")
set(CTEST_BUILD_FLAGS "-j4")
set(CTEST_CONFIGURE_COMMAND "${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE:STRING=${CTEST_BUILD_CONFIGURATION} --build ${CTEST_BINARY_DIRECTORY}")
set(CTEST_CONFIGURE_COMMAND "${CTEST_CONFIGURE_COMMAND} -DWITH_TESTING:BOOL=ON ${CTEST_BUILD_OPTIONS}")
set(CTEST_CONFIGURE_COMMAND "${CTEST_CONFIGURE_COMMAND} \"-G${CTEST_CMAKE_GENERATOR}\"")
set(CTEST_CONFIGURE_COMMAND "${CTEST_CONFIGURE_COMMAND} \"${CTEST_SOURCE_DIRECTORY}\"")
string(CONCAT CTEST_CONFIGURE_COMMAND "${CMAKE_COMMAND}"
" -DCMAKE_BUILD_TYPE:STRING=${CTEST_BUILD_CONFIGURATION}"
" --build ${CTEST_BINARY_DIRECTORY}"
" ${CTEST_BUILD_OPTIONS}"
" \"-G${CTEST_CMAKE_GENERATOR}\""
" \"${CTEST_SOURCE_DIRECTORY}\"")
ctest_start("Nightly")
ctest_test()
ctest_submit( PARTS Test )
# Tells CTest to not do a git pull, but to still record what version of the software it's building and testing
# As explained by mail, by Zack Galbreath
set(CTEST_UPDATE_VERSION_ONLY 1)
# For CDash integration with GitHub: https://blog.kitware.com/cdash-integration-with-github
set(CTEST_CHANGE_ID $ENV{CHANGE_ID})
ctest_start("Continuous")
# Added ctest_update() to ensure that the commit SHA will be passed to CDash, and GitHub.
ctest_update()
ctest_test(RETURN_VALUE RES)
ctest_submit( PARTS Test Update )
if (RES)
message(FATAL_ERROR "Unit tests have return code != 0")
else (RES)
message(STATUS "Unit tests have return code ${RES}")
endif(RES)
@@ -8,7 +8,7 @@
set(CTEST_PROJECT_NAME "SuperElastix")
set(CTEST_NIGHTLY_START_TIME "00:00:00 EST")
set(CTEST_DROP_METHOD "https")
set(CTEST_DROP_SITE "my.cdash.org")
set(CTEST_DROP_METHOD "http")
set(CTEST_DROP_SITE "trunk.cdash.org")
set(CTEST_DROP_LOCATION "/submit.php?project=SuperElastix")
set(CTEST_DROP_SITE_CDASH TRUE)
# What is this?
The `Submissions` directory contains all submissions for the [Continuous Registration Challenge](https://continuousregistration.grand-challenge.org/). The `Source` directory contains code for running the submissions on several different data sets.
# How to participate
Open a pull request that creates a directory under `Submissions` which contains your parameter files and a `README.md` file that describes your implementation.
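For illustration, a minimal sketch of what such a submission directory could contain; the team name `MyTeam`, blueprint name `MyBlueprint`, and dataset list are placeholders, not an actual submission:
```python
# Sketch: scaffold a hypothetical submission directory.
import json, os

team_dir = os.path.join('Submissions', 'MyTeam')
os.makedirs(team_dir, exist_ok=True)

# A blueprint must list the datasets it should be evaluated on
# (make_registration_scripts.py skips blueprints without a 'Datasets' key).
blueprint = {'Datasets': ['POPI', 'DIRLAB'], 'Components': [], 'Connections': []}
with open(os.path.join(team_dir, 'MyBlueprint.json'), 'w') as f:
    json.dump(blueprint, f, indent=2)

with open(os.path.join(team_dir, 'README.md'), 'w') as f:
    f.write('# MyTeam\n\nDescribe your registration method here.\n')
```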
# How to run registrations
`make_registration_scripts.py` in `Source` generates shell scripts or batch scripts for running the registrations. Each script registers one pair of images. The following command generates shell scripts for all submissions on the POPI, DIRLAB, EMPIRE, LPBA40, IBSR18, CUMC12, and HistoBIA data sets:
```bash
python make_registration_scripts.py \
--superelastix /path/to/superelastix/build/tree/SuperElastix-build/bin/superelastix \
--submissions-directory /path/to/superelastix/source/tree/ContinuousRegistration/Submissions \
--output-directory /tmp \
--popi-input-directory /path/to/POPI \
--dirlab-input-directory /path/to/DIRLAB \
--empire-input-directory /path/to/EMPIRE \
--lpba40-input-directory /path/to/LPBA40 \
--ibsr18-input-directory /path/to/IBSR18 \
--cumc12-input-directory /path/to/CUMC12 \
--hbia-input-directory /path/to/HistoBIA
```
This generates one shell script per registration. For example, on Linux you can navigate to the output directory and run
```bash
find . -name "*.sh" -exec chmod +x "{}" \;
find . -name "*.sh" -exec "{}" \;
find . -name "*.sh" -exec echo "nohup {} &" \; > parallel.sh;
```
to recursively find scripts, make them executable, and run them.
import numpy as np
# parse the numpy version into a list of ints
np_version = [int(i) for i in np.version.full_version.split('.')]
# comparing version strings does not work for versions lower than 1.10
if np_version >= [1, 14]:
np.set_printoptions(legacy='1.13')
\ No newline at end of file
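The guard above presumably keeps doctest output stable (the Travis config runs nose/pytest with doctests): NumPy 1.14 changed the default array repr, and `legacy='1.13'` restores the old formatting. A small illustration; the exact spacing shown in the comments is approximate and depends on the installed NumPy:
```python
# Sketch: NumPy >= 1.14 prints arrays more compactly than 1.13 did,
# so doctests written against 1.13 output need the legacy print mode.
import numpy as np

a = np.array([0.1, 1.0])
print(repr(a))                       # e.g. array([0.1, 1. ])   (1.14+ default)
np.set_printoptions(legacy='1.13')
print(repr(a))                       # e.g. array([ 0.1,  1. ]) (1.13 style)
```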
{
"Components": [
{
"Name": "DisplacementField",
"NameOfClass": "ItkDisplacementFieldSourceComponent",
"Dimensionality": "3"
},
{
"Name": "WarpingDisplacementField",
"NameOfClass": "ItkDisplacementFieldSourceComponent",
"Dimensionality": "3"
},
{
"Name": "DisplacementFieldComposer",
"NameOfClass": "ItkDisplacementFieldComposerComponent",
"Dimensionality": "3"
},
{
"Name": "DisplacementFieldSink",
"NameOfClass": "ItkDisplacementFieldSinkComponent",
"Dimensionality": "3"
}
],
"Connections": [
{
"Out": "DisplacementField",
"In": "DisplacementFieldComposer",
"NameOfInterface": "itkDisplacementFieldInterface"
},
{
"Out": "WarpingDisplacementField",
"In": "DisplacementFieldComposer",
"NameOfInterface": "itkWarpingDisplacementFieldInterface"
},
{
"Out": "DisplacementFieldComposer",
"In": "DisplacementFieldSink"
}
]
}
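The blueprint above follows the plain-JSON schema (`Components` declaring named components, `Connections` wiring their interfaces) that the challenge scripts read with `json.load`. A minimal sketch of loading and sanity-checking one; the file name is a placeholder:
```python
# Sketch: load a blueprint and verify the Components/Connections schema.
import json

with open('compose_displacement_fields.json') as f:  # placeholder name
    blueprint = json.load(f)

names = {component['Name'] for component in blueprint['Components']}
for connection in blueprint['Connections']:
    # Every connection must reference components declared above.
    assert connection['Out'] in names and connection['In'] in names
```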
#!/bin/bash -eux
# This file serves as an example on how to run registrations locally on Linux and macOS
# using the Python scripts in this directory. Adapt the parameters to your own needs.
python --version
# Parameters
SUPERELASTIX_SOURCE_DIR=/Users/kasper/Development/SuperElastix
SUPERELASTIX_BUILD_DIR=/Users/kasper/Development/build/SuperElastixRelease
CRC_INPUT_DIR=/Users/kasper/Development/SuperBenchData
CRC_OUTPUT_DIR=/Users/kasper/Data/SuperBench
CRC_MASK_DIR=/Users/kasper/Development/SuperBenchMask
CRC_MAX_NUM_REG_PER_DATASET=8
CRC_DATASETS="--hammers-input-directory $CRC_INPUT_DIR/HAMMERS --popi-input-directory $CRC_INPUT_DIR/POPI/MedPhys11 --popi-mask-directory $CRC_MASK_DIR/POPI --dirlab-input-directory $CRC_INPUT_DIR/DIRLAB --dirlab-mask-directory $CRC_MASK_DIR/DIRLAB --spread-input-directory $CRC_INPUT_DIR/SPREAD --mgh10-input-directory $CRC_INPUT_DIR/MGH10 --cumc12-input-directory $CRC_INPUT_DIR/CUMC12 --ibsr18-input-directory $CRC_INPUT_DIR/IBSR18 --lpba40-input-directory $CRC_INPUT_DIR/LPBA40"
CRC_BLUEPRINT_FILE_NAME="--blueprint-file-name NiftyRegAffineBSpline.json" # Leave empty to run registrations for all blueprints
pushd $SUPERELASTIX_SOURCE_DIR
# Generate shell scripts
python -m ContinuousRegistration.Source.make_registration_scripts --superelastix $SUPERELASTIX_BUILD_DIR/SuperElastix-build/bin/SuperElastix --submissions-directory $SUPERELASTIX_SOURCE_DIR/ContinuousRegistration/Submissions --output-directory $CRC_OUTPUT_DIR $CRC_DATASETS $CRC_BLUEPRINT_FILE_NAME --max-number-of-registrations-per-dataset $CRC_MAX_NUM_REG_PER_DATASET
# Register images serially
# find $CRC_OUTPUT_DIR -type f -name "*.sh" -exec "{}" \;
# Register images in parallel
find $CRC_OUTPUT_DIR -type f -name "*.sh" | parallel -j6 bash
# Evaluate registrations, generate visualizations, and generate leaderboard
python -m ContinuousRegistration.Source.make_evaluation --superelastix $SUPERELASTIX_BUILD_DIR/SuperElastix-build/bin/SuperElastix --submissions-directory $SUPERELASTIX_SOURCE_DIR/ContinuousRegistration/Submissions --output-directory $CRC_OUTPUT_DIR $CRC_DATASETS $CRC_BLUEPRINT_FILE_NAME --max-number-of-registrations-per-dataset $CRC_MAX_NUM_REG_PER_DATASET --make-image-checkerboards True --make-label-checkerboards True --make-difference-images True
python -m ContinuousRegistration.Source.make_leaderboard --superelastix $SUPERELASTIX_BUILD_DIR/SuperElastix-build/bin/SuperElastix --submissions-directory $SUPERELASTIX_SOURCE_DIR/ContinuousRegistration/Submissions --output-directory $CRC_OUTPUT_DIR $CRC_DATASETS $CRC_BLUEPRINT_FILE_NAME --max-number-of-registrations-per-dataset $CRC_MAX_NUM_REG_PER_DATASET
popd
id,x_size,y_size,z_size,x_spacing,y_spacing,z_spacing
1,256,256,94,0.97,0.97,2.5
2,256,256,112,1.16,1.16,2.5
3,256,256,104,1.15,1.15,2.5
4,256,256,99,1.13,1.13,2.5
5,256,256,106,1.10,1.10,2.5
6,512,512,128,0.97,0.97,2.5
7,512,512,136,0.97,0.97,2.5
8,512,512,128,0.97,0.97,2.5
9,512,512,128,0.97,0.97,2.5
10,512,512,120,0.97,0.97,2.5
\ No newline at end of file
import os, json, datetime
from ContinuousRegistration.Source.make_registration_scripts import parser
from ContinuousRegistration.Source.util import logging, load_submissions, write_json
from ContinuousRegistration.Source.datasets import load_datasets
def run(parameters):
submissions = load_submissions(parameters)
datasets = load_datasets(parameters)
results = {}
for team_name, blueprint_file_names in submissions.items():
for blueprint_file_name in blueprint_file_names:
if not team_name in results:
results[team_name] = {}
blueprint_name, blueprint_ext = os.path.splitext(os.path.basename(blueprint_file_name))
if not blueprint_name in results[team_name]:
results[team_name][blueprint_name] = {}
logging.info('Loading blueprint %s/%s.' % (team_name, os.path.basename(blueprint_name)))
blueprint = json.load(open(blueprint_file_name))
for dataset_name in blueprint['Datasets']:
if not dataset_name in datasets:
continue
dataset = datasets[dataset_name]
results[team_name][blueprint_name][dataset_name] = []
for file_names in dataset.generator():
output_directory = os.path.join(parameters.output_directory, team_name, blueprint_name)
logging.info('Evaluating registration for blueprint %s and images %s.', blueprint_name, file_names['image_file_names'])
try:
results[team_name][blueprint_name][dataset.name].append(dataset.evaluate(
parameters.superelastix, file_names, output_directory))
if hasattr(parameters, 'make_images') and parameters.make_images:
dataset.make_images(parameters.superelastix, file_names, output_directory)
if hasattr(parameters, 'make_labels') and parameters.make_labels:
dataset.make_labels(parameters.superelastix, file_names, output_directory)
if hasattr(parameters, 'make_difference_images') and parameters.make_difference_images:
dataset.make_difference_images(parameters.superelastix, file_names, output_directory)
if hasattr(parameters, 'make_checkerboards') and parameters.make_checkerboards:
dataset.make_checkerboards(parameters.superelastix, file_names, output_directory)
if hasattr(parameters, 'make_image_checkerboards') and parameters.make_image_checkerboards:
dataset.make_image_checkerboards(parameters.superelastix, file_names, output_directory)
if hasattr(parameters, 'make_label_checkerboards') and parameters.make_label_checkerboards and dataset.name in ["CUMC12", "IBSR18", "LPBA40", "MGH10"]:
dataset.make_label_checkerboards(parameters.superelastix, file_names, output_directory)
except Exception as e:
logging.error('Error during evaluation of %s\'s blueprint %s and dataset %s: %s'
% (team_name, blueprint_name, dataset.name, str(e)))
write_json(os.path.join(parameters.output_directory,
'results.json'), results)
return results
if __name__ == '__main__':
parser.add_argument('--make-images', '-mi', type=bool, default=False, help="Warp moving images.")
parser.add_argument('--make-labels', '-ml', type=bool, default=False, help="Warp moving labels.")
parser.add_argument('--make-difference-images', '-mdi', type=bool, default=False, help="Warp moving images and subtract from fixed images.")
parser.add_argument('--make-checkerboards', '-mc', type=bool, default=False, help="Warp checkerboard pattern.")
parser.add_argument('--make-image-checkerboards', '-mic', type=bool, default=False,
help="Warp moving images and make checkerboard with fixed and warped moving image.")
parser.add_argument('--make-label-checkerboards', '-mlc', type=bool, default=False,
help="Warp moving labels and make checkerboard with fixed and warped moving label.")
run(parser.parse_args())
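One caveat with the `--make-*` options here and in `make_registration_scripts.py`: `argparse` with `type=bool` converts any non-empty string to `True` (`bool('False')` is `True`), so `--make-images False` still enables the option. A hedged sketch of a more robust converter, should that ever bite:
```python
# Sketch: argparse's type=bool does not actually parse 'False'.
import argparse

def str2bool(value):
    """Map common true/false spellings to a real boolean."""
    if value.lower() in ('true', '1', 'yes'):
        return True
    if value.lower() in ('false', '0', 'no'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean, got %r' % value)

parser = argparse.ArgumentParser()
parser.add_argument('--make-images', type=str2bool, default=False)
print(parser.parse_args(['--make-images', 'False']).make_images)  # False
```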
import glob, os
from ContinuousRegistration.Source.make_registration_scripts import parser, load_datasets, load_submissions
from ContinuousRegistration.Source.util import logging, load_results_from_json, get_script_path
from datetime import datetime
import numpy as np
import json
import subprocess
date = datetime.now().strftime('%d-%m-%Y')
def run(parameters):
result_file_names = glob.glob(os.path.join(parameters.output_directory, 'results*'))
# Most recent first
result_file_names.sort(reverse=True)
if not result_file_names:
raise Exception('No result JSON files found in %s.' % parameters.output_directory)
logging.info('Loading results from %s.' % result_file_names[0])
results, result_names = load_results_from_json(result_file_names[0])
datasets = load_datasets(parameters)
for dataset_name, dataset in datasets.items():
if not dataset_name in result_names:
# This dataset was not evaluated
continue
make_dataset_results(dataset_name, result_names, results)
def make_dataset_results(dataset_name, result_names, results):
table = '<!DOCTYPE html>'
table += '<html>'
table += '<head>'
table += '<link href="https://rawgit.com/tristen/tablesort/gh-pages/tablesort.css" rel="stylesheet">'
table += '<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">'
table += '<script src="https://rawgit.com/tristen/tablesort/gh-pages/dist/tablesort.min.js"></script>'
table += '<style>'
table += 'table { width: 100%; } th, td { padding: 5px; text-align: left; }'
table += 'table { font-size: small; }'
table += '</style>'
table += '</head>'
table += '<body>'
table += '<table id="leaderboard-%s" class="sort">' % dataset_name
# Add table header
table += '<thead>'
table += '<tr>'
table += '<th role="columnheader">Team</th>'
table += '<th role="columnheader">Blueprint</th>'
table += '<th role="columnheader">Date</th>'
# Try to get blueprint commit hash
try:
repo_commit = subprocess.check_output(['git', 'describe', '--always'],
cwd=parameters.source_directory).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
repo_commit = None
logging.error('Error (exit code {0}): {1}'.format(e.returncode, e.output))
except Exception as e:
repo_commit = None
logging.error('Error reading commit hash from source directory "%s".' % parameters.source_directory)
if repo_commit:
table += '<th role="columnheader">Blueprint Commit</th>'
table += '<th role="columnheader">Repo Commit</th>'
table += '<th role="columnheader">Completed</th>'
for result_name in result_names[dataset_name]:
table += '<th role="columnheader">%s</th>' % result_name
table += '</tr>'
table += '</thead>'
table += '<tbody>'
for team_name, team_results in results.items():
for blueprint_name, blueprint_results in team_results.items():
if parameters.blueprint_file_name is not None and not blueprint_name \
in [os.path.splitext(blueprint_file_name)[0] for blueprint_file_name in parameters.blueprint_file_name]:
# User requested specific blueprints and this blueprint is not one of them
continue
blueprint_file_name_json = os.path.join(parameters.submissions_directory, team_name, blueprint_name + '.json')
blueprint_file_name_xml = os.path.join(parameters.submissions_directory, team_name, blueprint_name + '.xml')
if os.path.isfile(blueprint_file_name_json):
blueprint = json.load(open(blueprint_file_name_json))
elif os.path.isfile(blueprint_file_name_xml):
raise NotImplementedError('Cannot read xml blueprints yet.')
else:
raise Exception('Could not load blueprint.')
if dataset_name not in blueprint['Datasets']:
continue
make_blueprint_results(dataset_name, team_name, blueprint_name, blueprint_results, result_names)
table += '<tr>'
table += '<td>%s</td>' % team_name
table += '<td><a href="%s">%s</a></td>' % ('leaderboard-' + dataset_name + '-' + team_name + '-' + blueprint_name + '.html', blueprint_name)
table += '<td>%s</td>' % date
if repo_commit:
table += '<td>%s</td>' % repo_commit
# Try to get blueprint commit hash
try:
table += '<td>%s</td>' % subprocess.check_output(['git', 'describe', '--always'],
cwd=parameters.source_directory).decode('utf-8').strip()
except subprocess.CalledProcessError as e:
table += '<td></td>'
logging.error('Error (exit code {0}): {1}'.format(e.returncode, e.output))
if dataset_name in blueprint_results \
and 'result' in blueprint_results[dataset_name] \
and not np.isnan(blueprint_results[dataset_name]['result']).all():
result = blueprint_results[dataset_name]['result']
# count registrations that produced results (rows that are not all NaN)
table += '<td>%s/%s</td>' % (np.count_nonzero(~np.isnan(result).all(axis=1)), len(result))
means = np.nanmean(result, axis=0)
stds = np.nanstd(result, axis=0)
for mean, std in zip(means, stds):
table += '<td>%.2f &plusmn; %.2f</td>' % (mean, std)
else:
table += '<td>0</td>'
for result_name in result_names[dataset_name]:
table += '<td>N/A</td>'
table += '</tr>'
table += '</tbody>'
table += '</table>'
table += '<script>new Tablesort(document.getElementById("leaderboard-%s"));</script>' % dataset_name
table += '</body>'
table += '</html>'
table_file = open(os.path.join(parameters.output_directory,
'leaderboard-' + dataset_name + '.html'), "w")
table_file.write(table)
table_file.close()
def make_blueprint_results(dataset_name, team_name, blueprint_name, blueprint_results, result_names):
blueprint_table = '<!DOCTYPE html>'
blueprint_table += '<html>'
blueprint_table += '<head>'
blueprint_table += '<link href="https://rawgit.com/tristen/tablesort/gh-pages/tablesort.css" rel="stylesheet">'
blueprint_table += '<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">'
blueprint_table += '<script src="https://rawgit.com/tristen/tablesort/gh-pages/dist/tablesort.min.js"></script>'
blueprint_table += '<style>'
blueprint_table += 'table { width: 100%; } th, td { padding: 5px; text-align: left; }'
blueprint_table += '</style>'
blueprint_table += '</head>'
blueprint_table += '<body>'
blueprint_table += '<a href="leaderboard-' + dataset_name + '.html">Back</a>'
blueprint_table += '<table id="leaderboard-%s" class="sort">' % dataset_name
# Add table header
blueprint_table += '<thead>'
blueprint_table += '<tr>'
blueprint_table += '<th role="columnheader">Team</th>'
blueprint_table += '<th role="columnheader">Blueprint</th>'
blueprint_table += '<th role="columnheader">DisplacementField</th>'
blueprint_table += '<th role="columnheader">Date</th>'
for result_name in result_names[dataset_name]:
blueprint_table += '<th role="columnheader">%s</th>' % result_name
blueprint_table += '</tr>'
if dataset_name in blueprint_results \
and 'result' in blueprint_results[dataset_name]:
for result_name, results in zip(blueprint_results[dataset_name]['name'], blueprint_results[dataset_name]['result']):
blueprint_table += '<tr>'
blueprint_table += '<td>%s</td>' % team_name
blueprint_table += '<td>%s</td>' % blueprint_name
blueprint_table += '<td>%s</td>' % result_name
blueprint_table += '<td>%s</td>' % date
for result in results:
blueprint_table += '<td>%.2f</td>' % result
blueprint_table += '</tr>'
# Add table footer
blueprint_table += '</tbody>'
blueprint_table += '</table>'
blueprint_table += '<a href="leaderboard-' + dataset_name + '.html">Back</a>'
blueprint_table += '<script>new Tablesort(document.getElementById("leaderboard-%s"));</script>' % dataset_name
blueprint_table += '</body>'
blueprint_table += '</html>'
f = open(os.path.join(parameters.output_directory,
'leaderboard-' + dataset_name + '-' + team_name + '-' + blueprint_name + '.html'), "w")
f.write(blueprint_table)
f.close()
if __name__ == '__main__':
parameters = parser.parse_args()
if not os.path.exists(parameters.output_directory):
raise Exception('Output directory %s not found.' % parameters.output_directory)
run(parameters)
import os, json, argparse
from ContinuousRegistration.Source.datasets import logging, load_datasets
from ContinuousRegistration.Source.util import load_submissions
parser = argparse.ArgumentParser(description='Continuous Registration Challenge command line interface.')
parser.add_argument('--superelastix', '-selx', required=True,
help="Path to SuperElastix executable.")
parser.add_argument('--submissions-directory', '-sd', required=True,
help='Directory with parameter files.')
parser.add_argument('--output-directory', '-od', required=True,
help="Directory where results will be saved.")
parser.add_argument('--make-shell-scripts', '-mss', type=bool, default=True,
help="Generate shell scripts (default: True).")
parser.add_argument('--make-batch-scripts', '-mbs', type=bool, default=False,
help="Generate batch scripts (default: False).")
parser.add_argument('--brain2d-input-directory', '-b2d')
parser.add_argument('--lung2d-input-directory', '-l2d')
parser.add_argument('--cumc12-input-directory', '-cid')
parser.add_argument('--dirlab-input-directory', '-did')
parser.add_argument('--dirlab-mask-directory', '-dmd', default=None)
parser.add_argument('--empire-input-directory', '-eid')
parser.add_argument('--hammers-input-directory', '-hid')
parser.add_argument('--ibsr18-input-directory', '-iid')
parser.add_argument('--lpba40-input-directory', '-lid')
parser.add_argument('--spread-input-directory', '-sid')
parser.add_argument('--popi-input-directory', '-pid')
parser.add_argument('--popi-mask-directory', '-pmd', default=None)
parser.add_argument('--mgh10-input-directory', '-mid')
parser.add_argument('--hbia-input-directory', '-hbiaid')
parser.add_argument('--team-name', '-tn', action='append',
help="If specified, only generate shell scripts for these teams.")
parser.add_argument('--blueprint-file-name', '-bfn', action='append',
help="If specified, only generate shell scripts for this blueprint.")
parser.add_argument('--max-number-of-registrations-per-dataset', '-mnorpd', type=int, default=8)
parser.add_argument('--source-directory', '-srcd', default='.')
def run(parameters):
if not parameters.make_shell_scripts and not parameters.make_batch_scripts:
logging.error('Neither --make-shell-scripts nor --make-batch-scripts was True. Nothing to do.')
quit()
submissions = load_submissions(parameters)
datasets = load_datasets(parameters)
for team_name, blueprint_file_names in submissions.items():
for blueprint_file_name in blueprint_file_names:
logging.info('Loading blueprint %s.' % blueprint_file_name)
blueprint = json.load(open(blueprint_file_name))
if not 'Datasets' in blueprint:
logging.error('Missing key \'Datasets\' in blueprint %s. '
'Blueprint must specify on which datasets it should be evaluated. '
'Example: { Datasets: [\"SPREAD\", \"POPI\", \"LPBA40\"] }. '
'Skipping blueprint.' % blueprint_file_name)
continue
for dataset_name in blueprint['Datasets']:
if not dataset_name in datasets:
continue
dataset = datasets[dataset_name]
blueprint_name, blueprint_ext = os.path.splitext(os.path.basename(blueprint_file_name))
for file_names in dataset.generator():
logging.info('Generating registration scripts for blueprint "%s" and images %s.',
blueprint_name, file_names['image_file_names'])
dir_name = os.path.dirname(file_names['disp_field_file_names'][0])
blueprint_output_directory = os.path.join(parameters.output_directory,
team_name, blueprint_name,
dir_name)
if not os.path.exists(blueprint_output_directory):
os.makedirs(blueprint_output_directory)
output_directory = os.path.join(parameters.output_directory,
team_name, blueprint_name)
if parameters.make_shell_scripts:
dataset.make_shell_scripts(parameters.superelastix,
blueprint_file_name, file_names,
output_directory)
if parameters.make_batch_scripts:
dataset.make_batch_scripts(parameters.superelastix,
blueprint_file_name,
file_names, output_directory)
if __name__ == '__main__':
parameters = parser.parse_args()
if not os.path.isfile(parameters.superelastix):
raise Exception('Could not find SuperElastix '
+ parameters.superelastix + ".")
if not os.path.exists(parameters.submissions_directory):
raise Exception('Could not find submission directory '
+ parameters.submissions_directory + ".")
if not os.path.exists(parameters.output_directory):
os.mkdir(parameters.output_directory)
run(parameters)
import numpy as np
import SimpleITK as sitk
from ContinuousRegistration.Source.util import warp_image, warp_point_set, compose_displacement_fields
from ContinuousRegistration.Source.util import logging
def tre(superelastix, point_sets, deformation_field_file_names):
try:
point_set_fixed0_to_moving1 = warp_point_set(superelastix, point_sets[0], deformation_field_file_names[0])
point_set_fixed1_to_moving0 = warp_point_set(superelastix, point_sets[1], deformation_field_file_names[1])
except Exception:
logging.error('Failed to compute TRE for image pair (%s, %s).' % tuple(deformation_field_file_names))
return (
{'1. TRE': np.NaN},