Initial commit

Furen · 2022-05-11 11:38:16 +08:00 · commit 822692d032
7 changed files with 685 additions and 0 deletions

1-move.py (Executable file, +127 lines)

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
import os
import sys
import time

from pydicom.dataset import Dataset
from pynetdicom import AE, evt, StoragePresentationContexts, debug_logger
from pynetdicom.sop_class import PatientRootQueryRetrieveInformationModelMove

# debug_logger()

Series = {}
dcm_dir = '/nn'


def handle_store(event):
    """Handle a C-STORE request event."""
    # print(event)
    ds = event.dataset
    ds.file_meta = event.file_meta
    # print(ds.SeriesInstanceUID)
    if ds.SeriesInstanceUID in Series:
        s = Series[ds.SeriesInstanceUID]
    else:
        number = len(Series)
        # Earlier directory layouts, kept for reference; the hex-numbered layout below is the one in use:
        # series_dir = os.path.join(dcm_dir, ds.PatientID, ds.StudyInstanceUID, ds.SeriesInstanceUID, str(number))
        # series_dir = os.path.join(dcm_dir, ds.PatientID, ds.StudyDate, ds.Modality, str(number))
        series_dir = os.path.join(dcm_dir, ds.PatientID, ds.StudyDate, ds.Modality, hex(number)[2:])
        s = {
            'PatientID': ds.PatientID,
            'StudyInstanceUID': ds.StudyInstanceUID,
            'SeriesInstanceUID': ds.SeriesInstanceUID,
            'Modality': ds.Modality,
            'SeriesDescription': ds.SeriesDescription,
            'path': series_dir,
            'number': number,
        }
        # print(s)
        print(s['path'], s['SeriesDescription'])
        Series[ds.SeriesInstanceUID] = s
        os.makedirs(s['path'], exist_ok=True)

    # Save the dataset using the SOP Instance UID as the filename
    filename = os.path.join(s['path'], ds.SOPInstanceUID)
    ds.save_as(filename, write_like_original=False)

    # Return a 'Success' status
    return 0x0000


def QueryDCM(PatientID):
    handlers = [(evt.EVT_C_STORE, handle_store)]

    # Initialise the Application Entity
    ae = AE()
    # Add a requested presentation context
    ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove)
    # Add the Storage SCP's supported presentation contexts
    ae.supported_contexts = StoragePresentationContexts

    # Start our Storage SCP in non-blocking mode, listening on port 11120
    ae.ae_title = 'OUR_STORE_SCP'
    # scp = ae.start_server(("127.0.0.1", 11120), block=False, evt_handlers=handlers)
    scp = ae.start_server(("0.0.0.0", 11120), block=False, evt_handlers=handlers)

    # Create our identifier (query) dataset
    ds = Dataset()
    ds.QueryRetrieveLevel = 'PATIENT'
    ds.PatientID = PatientID

    # Associate with the peer AE (the query/retrieve SCP)
    # assoc = ae.associate("127.0.0.1", 11112)
    assoc = ae.associate("192.168.10.56", 104,
                         ae_title='IQWEBX',
                         )
    if assoc.is_established:
        # Use the C-MOVE service to send the identifier
        responses = assoc.send_c_move(ds, 'OUR_STORE_SCP', PatientRootQueryRetrieveInformationModelMove)
        for (status, identifier) in responses:
            # print(status, identifier)
            if status:
                print('C-MOVE query status: 0x{0:04x}'.format(status.Status))
            else:
                print('Connection timed out, was aborted or received invalid response')
        # Release the association
        assoc.release()
    else:
        print('Association rejected, aborted or never connected')

    # Stop our Storage SCP
    scp.shutdown()


def main():
    if len(sys.argv) < 2:
        print('Usage:', sys.argv[0], 'PatientID')
        sys.exit()
    print('hello')
    print(sys.argv[0])
    print(sys.argv[1])

    start = time.time()
    QueryDCM(sys.argv[1])
    end = time.time()
    print(end - start, 'seconds')

    # for k, v in Series.items():
    #     print(v['number'], v['Modality'], v['SeriesDescription'], v['path'])
    #     print(v['path'], v['SeriesDescription'])


if __name__ == '__main__':
    main()
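
A minimal usage sketch for the retrieval step (the PatientID, study date and resulting paths below are made-up examples, not values from this commit): the script issues a Patient-level C-MOVE to the configured PACS, and its embedded Storage SCP files each received instance under /nn/<PatientID>/<StudyDate>/<Modality>/<series index in hex>/<SOPInstanceUID>.

# hypothetical invocation; 0123456 stands in for a real PatientID
./1-move.py 0123456
# retrieved instances are then expected under paths like
#   /nn/0123456/20220511/MR/0/<SOPInstanceUID>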

2-infer.py (Executable file, +147 lines)

@@ -0,0 +1,147 @@
#!/usr/bin/env python3
import difflib
import os
import shutil
import subprocess
import sys
import time

from nipype.interfaces.dcm2nii import Dcm2niix
from rt_utils import RTStructBuilder
import numpy as np
import SimpleITK as sitk

import itk_elastix


def dcm2nii(source_dir, output_dir):
    # print(source_dir)
    # print(output_dir)
    converter = Dcm2niix()
    converter.inputs.source_dir = source_dir
    # converter.inputs.compression = 5
    converter.inputs.output_dir = output_dir
    print(converter.cmdline)
    converter.run()


def register(DCM_CT, DCM_MR):
    matcher = difflib.SequenceMatcher(a=DCM_CT, b=DCM_MR)
    match = matcher.find_longest_match(0, len(matcher.a), 0, len(matcher.b))
    ROOT_DIR = DCM_CT[:match.size]
    NII_DIR = os.path.join(ROOT_DIR, 'nii')
    INPUT_DIR = os.path.join(ROOT_DIR, 'input')
    OUTPUT_DIR = os.path.join(ROOT_DIR, 'output')
    head, tail = os.path.split(DCM_CT)
    rtss_file = os.path.join(head, tail+'-rtss.dcm')

    shutil.rmtree(NII_DIR, ignore_errors=True)
    os.makedirs(NII_DIR)
    shutil.rmtree(INPUT_DIR, ignore_errors=True)
    os.makedirs(INPUT_DIR)
    shutil.rmtree(OUTPUT_DIR, ignore_errors=True)
    # os.makedirs(OUTPUT_DIR)

    nCT = os.path.basename(DCM_CT)
    nMR = os.path.basename(DCM_MR)
    # print(nCT, nMR)
    # exit()

    dcm2nii(DCM_CT, NII_DIR)
    dcm2nii(DCM_MR, NII_DIR)

    for e in os.scandir(NII_DIR):
        if e.name.endswith('.nii.gz'):
            if e.name.startswith(nCT+'_'):
                NII_CT = e.path
            elif e.name.startswith(nMR+'_'):
                NII_MR = e.path

    basename = os.path.basename(NII_MR)
    old = '_'+basename.split('_')[-1]
    input_file = os.path.join(INPUT_DIR, basename.replace(old, '_0000.nii.gz'))
    output_file = os.path.join(OUTPUT_DIR, basename.replace(old, '.nii.gz'))

    basename_ct = os.path.basename(NII_CT)
    old_ct = '_'+basename_ct.split('_')[-1]
    label_file = os.path.join(NII_DIR, basename_ct.replace(old_ct, '.label.nii.gz'))

    shutil.copy(NII_MR, input_file)
    print(NII_CT, NII_MR, input_file)

    # nnUNet_predict -i INPUT_FOLDER -o OUTPUT_FOLDER -t 222 -m 3d_lowres --save_npz
    subprocess.run(["nnUNet_predict",
                    "-i", INPUT_DIR,
                    "-o", OUTPUT_DIR,
                    "-t", "222",
                    "-m", "3d_lowres",
                    "--save_npz",
                    ])
    print(output_file)

    r2 = itk_elastix.register(NII_CT, NII_MR)
    itk_elastix.transform_write(output_file, r2['fwdtransforms'], label_file, is_label=True)

    reader = sitk.ImageSeriesReader()
    dicom_names = reader.GetGDCMSeriesFileNames(DCM_CT)
    reader.SetFileNames(dicom_names)
    reader.MetaDataDictionaryArrayUpdateOn()
    reader.LoadPrivateTagsOn()
    image = reader.Execute()

    nnU = sitk.ReadImage(label_file)
    nnU = sitk.Resample(nnU, image, sitk.Transform(), sitk.sitkNearestNeighbor)

    ccfilter = sitk.ConnectedComponentImageFilter()
    nnUCC = ccfilter.Execute(nnU)
    ObjectCount1 = ccfilter.GetObjectCount()

    rtstruct = RTStructBuilder.create_new(dicom_series_path=DCM_CT)
    for j1 in range(ObjectCount1):
        label1 = sitk.BinaryThreshold(nnUCC, j1+1, j1+1)
        # label1 = sitk.AntiAliasBinary(label1)
        mask = sitk.GetArrayFromImage(label1).astype(bool)
        mask = np.transpose(mask, (1, 2, 0))
        # continue
        if mask.any():
            print(j1)
            rtstruct.add_roi(
                mask=mask,
                # use_pin_hole=True,
                # name="n%d"%n,
            )

    print(rtss_file)
    rtstruct.save(rtss_file)


def main():
    if len(sys.argv) < 3:
        print('Usage:', sys.argv[0], 'DCM_CT', 'DCM_MR')
        sys.exit()
    print('hello')
    print(sys.argv[0])
    print(sys.argv[1])

    start = time.time()
    register(sys.argv[1], sys.argv[2])
    end = time.time()
    print(end - start, 'seconds')


if __name__ == '__main__':
    main()
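
A usage sketch for the inference step (the directory names are hypothetical, following the layout 1-move.py produces): given a CT series directory and an MR series directory that share a common path prefix, the script converts both to NIfTI, runs nnUNet_predict on the MR, registers the CT and MR with itk_elastix, resamples the predicted label onto the CT grid, and writes an RT Structure Set next to the CT directory.

# hypothetical paths following the /nn/<PatientID>/<StudyDate>/<Modality>/<n> layout
./2-infer.py /nn/0123456/20220511/CT/0 /nn/0123456/20220511/MR/1
# expected result: /nn/0123456/20220511/CT/0-rtss.dcm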

3-send.py (Executable file, +59 lines)

@@ -0,0 +1,59 @@
#!/usr/bin/env python3
import os
import sys

from pydicom import dcmread
from pynetdicom import AE, debug_logger
from pynetdicom.sop_class import CTImageStorage, RTStructureSetStorage

debug_logger()


def SendDCM(fp):
    # Initialise the Application Entity
    ae = AE()
    ae.ae_title = 'OUR_STORE_SCP'

    # Add a requested presentation context
    # ae.add_requested_context(CTImageStorage)
    ae.add_requested_context(RTStructureSetStorage)

    # Read in our DICOM RT Structure Set dataset
    ds = dcmread(fp)

    # Associate with the peer AE (the storage SCP)
    # assoc = ae.associate("127.0.0.1", 11112)
    assoc = ae.associate("172.16.40.36", 104,
                         ae_title='N1000_STORAGE',
                         )
    if assoc.is_established:
        # Use the C-STORE service to send the dataset
        # returns the response status as a pydicom Dataset
        status = assoc.send_c_store(ds)

        # Check the status of the storage request
        if status:
            # If the storage request succeeded this will be 0x0000
            print('C-STORE request status: 0x{0:04x}'.format(status.Status))
        else:
            print('Connection timed out, was aborted or received invalid response')

        # Release the association
        assoc.release()
    else:
        print('Association rejected, aborted or never connected')


def main():
    if len(sys.argv) < 2:
        print('Usage:', sys.argv[0], 'RTSS')
        sys.exit()
    print('hello')
    print(sys.argv[0])
    print(sys.argv[1])
    SendDCM(sys.argv[1])


if __name__ == '__main__':
    main()
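
A usage sketch for the send step (the path is illustrative, matching the RTSS file 2-infer.py writes): the script opens an association with the configured storage SCP and sends the structure set with C-STORE.

# hypothetical invocation
./3-send.py /nn/0123456/20220511/CT/0-rtss.dcm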

docker/Dockerfile (Executable file, +35 lines)

@@ -0,0 +1,35 @@
FROM nvcr.io/nvidia/clara-train-sdk:v4.1
# apt
RUN apt-get update -y \
&& apt-get install dcm2niix openssh-server pigz sudo -y
# pip
# RUN pip install antspyx itk-elastix nipype nnunet rt_utils
RUN pip install antspyx itk-elastix nnunet # too large .... install first
RUN pip install --upgrade git+https://github.com/FabianIsensee/hiddenlayer.git@more_plotted_details#egg=hiddenlayer
RUN pip install git+https://github.com/qurit/rt-utils.git@5bab9ffcc8fe19dd775e940afdc3d8f48f869150 # fix FrameOfReferenceUID
RUN pip install masonite nipype
# nnUNet
ENV nnUNet_raw_data_base="/workspace/nnUNet_raw_data_base"
ENV nnUNet_preprocessed="/workspace/nnUNet_preprocessed"
ENV RESULTS_FOLDER="/workspace/nnUNet_trained_models"
# SSH server
#RUN echo 'root:password' | chpasswd
#RUN echo "PasswordAuthentication yes" >> /etc/ssh/sshd_config
#RUN echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
#ENTRYPOINT service ssh restart && env >> /etc/environment && bash
#EXPOSE 22
# jupyter
ENTRYPOINT jupyter-lab
EXPOSE 8888
# pynetdicom
EXPOSE 11120
# WORKDIR /workspace
WORKDIR /123

docker/build.sh (Executable file, +1 line)

@@ -0,0 +1 @@
DOCKER_BUILDKIT=1 docker build -t 123:v0 .

docker/qrun.sh (Executable file, +14 lines)

@@ -0,0 +1,14 @@
#DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
#DIR="$(dirname $( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd ))"
export dockerImage=123:v0
#docker run --gpus all --ipc=host --ulimit memlock=-1 --ulimit stack=67108864 -it --rm -v $DIR:/workspace $dockerImage /bin/bash
GPU=nvidia0 gpu-docker run --ipc=host --ulimit memlock=-1 --ulimit stack=67108864 -it --rm \
-v /share/WORKSPACE/nnUNet:/workspace \
-v /share/Public/git/123:/123 \
-v /share/Public/git/123:/root \
-v /share/Public/nn:/nn \
-p 11120:11120 \
-p 8888:8888 \
$dockerImage &

itk_elastix.py (Normal file, +302 lines)

@@ -0,0 +1,302 @@
from pprint import pprint
import os
import re
import shutil
import tempfile

from skimage.metrics import normalized_mutual_information
import itk

metric_pattern = r'Final metric value = (\S+)'
metric_prog = re.compile(metric_pattern)

'''
ants.registration.interface
myiterations = '2100x1200x1200x10'
'''
# MaximumNumberOfIterations = ['1200']
# MaximumNumberOfIterations = ['2100']

# NumberOfIterations = 1200
NumberOfIterations = 2100
# NumberOfIterations = 4000


def register_aux(fi, mv, debug=False, MaximumNumberOfIterations=[str(NumberOfIterations)]):
    parameter_object = itk.ParameterObject.New()
    default_rigid_parameter_map = parameter_object.GetDefaultParameterMap('rigid')
    default_rigid_parameter_map["AutomaticTransformInitialization"] = ["true"]
    # default_rigid_parameter_map["NumberOfSamplesForExactGradient"] = ["100000"]
    default_rigid_parameter_map['MaximumNumberOfIterations'] = MaximumNumberOfIterations
    parameter_object.AddParameterMap(default_rigid_parameter_map)
    # pprint(default_rigid_parameter_map.asdict())
    # exit()

    outdir1 = tempfile.mkdtemp()
    try:
        fm, params1 = itk.elastix_registration_method(
            fi, mv,
            parameter_object=parameter_object,
            # log_to_console=True,
            log_to_file=True,
            output_directory=outdir1,
        )
    except Exception as ex:
        print(ex)
        print(os.path.join(outdir1, 'elastix.log'))
        # exit()
        return {
            'metrics': 0,
        }
    TransformParameterFileName = os.path.join(outdir1, 'TransformParameters.0.txt')
    # print(TransformParameterFileName)
    # exit()

    '''
    From the elastix manual: the DisplacementMagnitudePenalty is a cost function that
    penalises ||T(x) - x||^2. You can use this to invert transforms, by setting the
    transform to be inverted as an initial transform (using -t0), setting
    (HowToCombineTransforms "Compose"), and running elastix with this metric, using the
    original fixed image set both as fixed (-f) and moving (-m) image. After that you can
    manually set the initial transform in the last parameter file to "NoInitialTransform",
    and voila, you have the inverse transform! Strictly speaking, you should then also
    change the Size/Spacing/Origin/Index/Direction settings to match that of the moving
    image. Select it with:
        (Metric "DisplacementMagnitudePenalty")
    Note that inverting a transformation becomes conceptually very similar to performing an
    image registration in this way. Consequently, the same choices are relevant:
    optimisation algorithm, multiresolution etc...
    Note that this procedure was described and evaluated in Metz et al. [2011].
    '''
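    # The block below follows that recipe: elastix is run with the moving image as both
    # fixed and moving image, the forward transform as the initial transform,
    # (HowToCombineTransforms "Compose") and the DisplacementMagnitudePenalty metric;
    # afterwards the initial transform is reset to "NoInitialTransform", so params2 ends
    # up holding an approximation of the inverse transform, and the final metric value
    # read from elastix.log measures how good that inverse is.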
    parameter_object2 = itk.ParameterObject.New()
    inverse_rigid_parameter_map = parameter_object.GetDefaultParameterMap('rigid')
    inverse_rigid_parameter_map["HowToCombineTransforms"] = ["Compose"]
    inverse_rigid_parameter_map["Metric"] = ["DisplacementMagnitudePenalty"]
    # inverse_rigid_parameter_map["AutomaticTransformInitialization"] = ["true"]
    inverse_rigid_parameter_map['MaximumNumberOfIterations'] = MaximumNumberOfIterations
    # inverse_rigid_parameter_map['UseAdaptiveStepSizes'] = ['false']
    parameter_object2.AddParameterMap(inverse_rigid_parameter_map)
    # print(TransformParameterFileName)
    # exit()

    outdir2 = tempfile.mkdtemp()
    mm, params2 = itk.elastix_registration_method(
        mv, mv,
        parameter_object=parameter_object2,
        initial_transform_parameter_file_name=TransformParameterFileName,
        log_to_console=debug,
        log_to_file=True,
        output_directory=outdir2,
    )
    elastix_log = os.path.join(outdir2, 'elastix.log')
    with open(elastix_log) as log:
        m = re.search(metric_prog, log.read())
        DisplacementMagnitudePenalty = float(m[1])
    # print(DisplacementMagnitudePenalty)
    # exit()

    last_parameter_map = params2.GetParameterMap(0)
    # pprint(last_parameter_map.asdict())
    # exit()
    last_parameter_map["InitialTransformParametersFileName"] = ["NoInitialTransform"]
    params2.SetParameterMap(0, last_parameter_map)
    # params2.WriteParameterFile('123.txt')

    mf = itk.transformix_filter(
        fi,
        params2)

    m1 = normalized_mutual_information(itk.GetArrayViewFromImage(fi), itk.GetArrayViewFromImage(fm))
    m2 = normalized_mutual_information(itk.GetArrayViewFromImage(mv), itk.GetArrayViewFromImage(mf))
    print(MaximumNumberOfIterations, m1, m2, DisplacementMagnitudePenalty)

    shutil.rmtree(outdir1)
    shutil.rmtree(outdir2)
    # exit()
    return {
        'fwdtransforms': params1,
        'invtransforms': params2,
        'warpedfixout': mf,
        'warpedmovout': fm,
        'metrics': max(m1, m2),
        'DisplacementMagnitudePenalty': DisplacementMagnitudePenalty,
    }


PixelType = itk.F
Dimension = 3
ImageType = itk.Image[PixelType, Dimension]
# reader = itk.ImageFileReader[ImageType].New()
# reader.SetFileName("image.tif")
# reader.Update()
# image = reader.GetOutput()

# METRIC_THRESHOLD = 1.1
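
# register() below tries the registration in both directions, doubling
# MaximumNumberOfIterations until the DisplacementMagnitudePenalty of the recovered
# inverse drops below 1 (or the registration fails), then keeps whichever direction
# scores the higher normalized mutual information, swapping the forward/inverse
# transforms and warped outputs when the reversed direction wins.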
def register(fi, mv, warpedfixout=None, warpedmovout=None, debug=False, iterations_init=NumberOfIterations):
    # reader = itk.ImageFileReader[ImageType].New()
    # reader.SetFileName(fi)
    # reader.Update()
    # fixed_image = reader.GetOutput()
    fixed_image = itk.imread(fi, itk.F)
    moving_image = itk.imread(mv, itk.F)

    iterations = iterations_init
    # iterations_fin = iterations_init*2
    while True:
        MaximumNumberOfIterations = [str(iterations)]
        r1 = register_aux(fixed_image, moving_image, debug, MaximumNumberOfIterations=MaximumNumberOfIterations)
        if 'DisplacementMagnitudePenalty' not in r1:  # registration failed
            break
        if r1['DisplacementMagnitudePenalty'] < 1:
            break
        # elif r1['metrics'] > METRIC_THRESHOLD:
        #     Redo = False
        # if iterations > iterations_fin:
        #     Redo = False
        iterations *= 2

    while True:
        MaximumNumberOfIterations = [str(iterations)]
        r2 = register_aux(moving_image, fixed_image, debug, MaximumNumberOfIterations=MaximumNumberOfIterations)
        if 'DisplacementMagnitudePenalty' not in r2:  # registration failed
            break
        elif r2['DisplacementMagnitudePenalty'] < 1:
            break
        # elif r2['metrics'] > METRIC_THRESHOLD:
        #     Redo = False
        # if iterations > iterations_fin:
        #     Redo = False
        iterations *= 2

    if r1['metrics'] > r2['metrics']:
        res = r1
    else:
        if 'invtransforms' not in r2:
            return None
        res = dict(r2)
        res.update({
            'fwdtransforms': r2['invtransforms'],
            'invtransforms': r2['fwdtransforms'],
            'warpedfixout': r2['warpedmovout'],
            'warpedmovout': r2['warpedfixout'],
        })
    assert res['DisplacementMagnitudePenalty'] < 1, 'DisplacementMagnitudePenalty: %f ' % (res['DisplacementMagnitudePenalty'])

    if warpedfixout is not None:
        itk.imwrite(res['warpedfixout'], warpedfixout)
    if warpedmovout is not None:
        itk.imwrite(res['warpedmovout'], warpedmovout)

    if debug:
        pprint(res)
        itk.imwrite(fixed_image, '0fixed.nii.gz')
        itk.imwrite(moving_image, '0moving.nii.gz')
        itk.imwrite(r1['warpedfixout'], '0mf1.nii.gz')
        itk.imwrite(r1['warpedmovout'], '0fm1.nii.gz')
        itk.imwrite(r2['warpedmovout'], '0mf2.nii.gz')
        itk.imwrite(r2['warpedfixout'], '0fm2.nii.gz')
    return res


def transform_write(moving, transform, output_filename, is_label=False):
    mv = itk.imread(moving)
    last_parameter_map = transform.GetParameterMap(0)
    if is_label:
        # last_parameter_map["InitialTransformParametersFileName"] = ["NoInitialTransform"]
        last_parameter_map["ResampleInterpolator"] = ["FinalNearestNeighborInterpolator"]
        # last_parameter_map["ResultImagePixelType"] = ["unsigned char"]
    t2 = itk.ParameterObject.New()
    t2.AddParameterMap(last_parameter_map)
    # pprint(t2.GetParameterMap(0).asdict())
    output = itk.transformix_filter(
        mv.astype(itk.F),
        t2)
    if is_label:
        output = output.astype(itk.UC)
    itk.imwrite(output, output_filename)


if __name__ == '__main__':
    fi = '/media/nfs/SRS/TSGH2022G4/register_fwd/2131720/case2017.04.21.11.01.48/patient_6_RT_Cyberknife_1mm_Head_20170420091131_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/register_fwd/2131720/case2017.04.21.11.01.48/patient_T1_SE_GD_20170420102216_3.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/2131720/patient_6_RT_Cyberknife_1mm_Head_20170420091131_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2131720/patient_T1_SE_GD_20170420102216_3.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/1693329/patient_6_Head_CTA_CNY_Head_20100330100138_18.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/1693329/patient_T1_SE_GD_20100330105831_3.nii.gz'
    # 4D image
    # fi = '/media/nfs/SRS/NTUH2022G4/image/6053604/patient_1.7_CTA_+_Perfusion_(Subtraction)_20140121125458_103.nii.gz'
    # mv = '/media/nfs/SRS/NTUH2022G4/image/6053604/patient_AX_T1+C(3D_2MM)_ZIP_512_20140121105711_5.nii.gz'
    # bad registration
    fi = '/media/nfs/SRS/TSGH2022G4/image/2978108/patient_6_RT_Cyberknife_1mm_Head_20170608103902_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2978108/patient_Ax_T1_SE_CK_GD+_20170608094843_4.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/2511774/patient_6_RT_Cyberknife_1mm_Head_20141215144427_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2511774/patient_T1_SE_GD_20141215140945_3.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/2305719/patient_6_RT_Cyberknife_1mm_Head_20090519105710_4.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2305719/patient_T1_SE_GD_20090519104815_3.nii.gz'
    # insufficient inverse
    fi = '/media/nfs/SRS/TSGH2022G4/image/2441399/patient_2_Liver_3Phase_CK_Abdomen_20111031104910_9_e1.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2441399/patient_6_RT_Cyberknife_1mm_Head_20120625103934_3.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/2232394/patient_6_RT_Cyberknife_1mm_Head_20100222122239_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/2232394/patient_6_RT_Cyberknife_1mm_Head_20100222092318_4.nii.gz'
    fi = '/media/nfs/SRS/TSGH2022G4/image/1255468/patient_6_RT_Cyberknife_1mm_Head_20080417105648_3.nii.gz'
    mv = '/media/nfs/SRS/TSGH2022G4/image/1255468/patient_ebrain_IR_T1_ax_GD_20110209152748_1102.nii.gz'

    # r = register(fi, mv, debug=True)
    r = register(fi, mv)
    print(r)