Code snippet:
# -*- coding: utf-8 -*-
"""
Save atlases propagation results using registration with dense displacement fields predicted from networks.
@author: Xinzhe Luo
@version: 0.1
"""
from __future__ import print_function, division, absolute_import, unicode_literals
from core import model_ddf_mvmm_label_base as model
from core import image_dataset as image_utils
from core import utils, losses
# import nibabel as nib
import numpy as np
import os
import logging
import tensorflow as tf
import pandas as pd
import argparse
from datetime import datetime
t = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
parser = argparse.ArgumentParser(description='Start atlas propagation on test dataset!')
parser.add_argument('--time', default=t, type=str,
                    help='The current time to save test predictions.')
parser.add_argument('--space', type=str, default='commonspace2',
                    choices=['commonspace1', 'commonspace2'],
                    help='The commonspace type for test data.')
parser.add_argument('--spacing', default='2mm', type=str, choices=['1mm', '2mm'],
                    help='The spatial spacing of the network inputs and the dense displacement fields.')
parser.add_argument('--dropout', default=0, type=float, help='The dropout probability for network prediction.')
parser.add_argument('--dropout_type', default='regular', type=str, choices=['regular', 'spatial'],
                    help='dropout type')
parser.add_argument('--model_path', type=str, default=None,
                    help='The model path to restore the network parameters for network prediction.')
parser.add_argument('--latest_filename', default='best_checkpoint', type=str,
                    help='latest filename to restore the model')
parser.add_argument('--trial', default=0, type=int, help='which trial to load the model')
parser.add_argument('--cuda_device', default=0, type=int,
                    help='The cuda device for network prediction.')
parser.add_argument('--atlas_search_path', default='../../../dataset/training_mr_20_commonspace2/*.nii.gz', type=str,
                    help='The search pattern to find all training atlas images, labels and probabilities.')
parser.add_argument('--atlas_modality', default='mr', choices=['mr', 'ct'],
                    help="the modality of atlas image, either 'mr' or 'ct'")
parser.add_argument('--a_min', default=None, type=float, help='min value for intensity clipping')
parser.add_argument('--a_max', default=None, type=float, help='max value for intensity clipping')
parser.add_argument('--image_suffix', default='image.nii.gz', type=str,
                    help='suffix pattern for the images')
parser.add_argument('--label_suffix', default='label.nii.gz', type=str,
                    help='suffix pattern for the labels')
parser.add_argument('--weight_suffix', default=None, type=str,
                    help='suffix pattern for the weights')
parser.add_argument('--crop_patch', default=True, type=bool,
                    help='whether to crop patches of the test data')
parser.add_argument('--patch_center', default=None, nargs='+',
                    help='The customized patch center, default is None.')
parser.add_argument('--patch_size', default=(80, 80, 80), type=int, nargs='+',
                    help='The size of the cropped patch.')
parser.add_argument('--original_size', default=(112, 96, 112), type=int, nargs=3,
                    help='original size of the saved image')
parser.add_argument('--num_blocks', default=(1, 1, 1), type=int, nargs='+',
                    help='The number of blocks of input along each axis, default is (1, 1, 1).')
parser.add_argument('--method', default='unet',
                    choices=['ddf_label', 'ddf_label_v0', 'unet'], type=str,
                    help='the method of network to infer the dense displacement fields')
parser.add_argument('--num_down_blocks', default=4, type=int,
                    help='the number of downside convolution blocks of the network')
parser.add_argument('--ddf_levels', default=None, type=int, nargs='*',
                    help='the network levels where to extract dense displacement fields')
parser.add_argument('--features_root', default=32, type=int,
                    help='number of features of the first convolution layer')
parser.add_argument('--normalizer', default=None, type=str,
                    choices=['batch', 'group', 'layer', 'instance', 'batch_instance'],
                    help='type of network normalization method')
parser.add_argument('--diffeomorphism', default=False, action='store_true',
                    help='whether to use diffeomorphic transformations')
parser.add_argument('--int_steps', default=4, type=int,
                    help='number of integration steps on the velocity fields')
parser.add_argument('--cost_function', default='label_consistency',
                    choices=(['MvMM_negative_log-likelihood', 'label_consistency', 'multi_scale_label_consistency',
                              'dice', 'multi_scale_dice', 'cross_entropy', 'SSD']),
                    help='the type of cost function for network optimization')
parser.add_argument('--reg_stage', default='single', type=str, choices=['single', 'multi'],
                    help="The registration stage, either 'single' or 'multi'.")
parser.add_argument('--test_input_size', default=(112, 96, 112), type=int, nargs='+',
                    help='The test input size.')
parser.add_argument('--save_ddf', default=False, action='store_true',
                    help='whether to save displacement field into nifty files')
# parser.add_argument('--save_path', default='./', type=str,
#                     help="Path where to save the test results.")
args = parser.parse_args()
# determine the prediction/metrics save path and the data search path
if args.space == 'commonspace1':
    save_path = os.path.join(args.model_path, 'test_predictions_commonspace1_%s' % args.spacing)
    target_search_path = '../../../dataset/test_mr_40_commonspace1/*.nii.gz'
    metrics_path = os.path.join(args.model_path, 'metrics_test_pairwise_commonspace1_%s.xlsx' % args.spacing)
    scale_model = 1
elif args.space == 'commonspace2':
    save_path = os.path.join(args.model_path, 'test_predictions_commonspace2_%s' % args.spacing)
    target_search_path = '../../../dataset/test_mr_40_commonspace2/*.nii.gz'
    metrics_path = os.path.join(args.model_path, 'metrics_test_pairwise_commonspace2_%s.xlsx' % args.spacing)
    scale_model = 0
else:
    raise Exception("The space must be either 'commonspace1' or 'commonspace2'!")
if __name__ == '__main__':
    # set cuda device
    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)

    # set working directory
    print("Current working directory: %s" % os.getcwd())
    os.chdir('../')
    print("Working directory changed to: %s" % os.path.abspath(os.getcwd()))

    if not os.path.exists(save_path):
        logging.info("Allocating '{:}'".format(save_path))
        os.makedirs(save_path)

    if 'model_trained' in args.model_path and 'trial' in args.model_path:
        model_path = args.model_path
    else:
        # model_dir = os.path.join(args.model_path, 'trial_%s' % args.trial, 'model_trained')
        model_dir = args.model_path
        print('model_dir')
        print(model_dir)
        print('args.latest_filename')
        print(args.latest_filename)
        ckpt = tf.train.get_checkpoint_state(model_dir, latest_filename=args.latest_filename)
        print('ckpt')
        print(ckpt)
        model_path = ckpt.model_checkpoint_path

    test_model_data_provider = image_utils.ImageDataProvider(
        target_search_path='../../../dataset/test_mr_40_commonspace2/*.nii.gz',
        atlas_search_path=args.atlas_search_path,
        atlas_modality=args.atlas_modality,
        a_min=args.a_min,
        a_max=args.a_max,
        image_suffix=args.image_suffix,
        label_suffix=args.label_suffix,
        train_phase=False,
        n_atlas=1,
        crop_patch=args.crop_patch,
        patch_center=args.patch_center,
        patch_size=args.patch_size,
        crop_roi=False,
        image_normalization=True,
        channels=1,
        n_class=8,
        label_intensity=(0, 205, 420, 500, 550, 600, 820, 850),
        scale=0,
        num_blocks=args.num_blocks,
        image_name_index_begin=-38,
        stage=args.reg_stage)
    test_data_provider = image_utils.ImageDataProvider(target_search_path=target_search_path,
                                                       atlas_search_path=args.atlas_search_path,
                                                       atlas_modality=args.atlas_modality,
                                                       a_min=args.a_min,
                                                       a_max=args.a_max,
                                                       image_suffix=args.image_suffix,
                                                       label_suffix=args.label_suffix,
                                                       weight_suffix=args.weight_suffix,
                                                       train_phase=False,
                                                       n_atlas=1,
                                                       crop_patch=False,
                                                       # patch_center=[i*2**scale_model
                                                       #               for i in args.patch_center]
                                                       #              if args.patch_center else None,
                                                       # patch_size=[i*2**scale_model
                                                       #             for i in args.patch_size],
                                                       crop_roi=False,
                                                       image_normalization=False,
                                                       channels=1,
                                                       n_class=8,
                                                       label_intensity=(0, 205, 420, 500, 550, 600, 820, 850),
                                                       scale=0,
                                                       num_blocks=args.num_blocks,
                                                       stage=args.reg_stage,
                                                       image_name_index_begin=-38)

logging.info("Number of target-atlas pairs: %s" % len(test_data_provider))
with tf.Graph().as_default():
net = model.NetForPrediction(n_blocks=args.num_blocks,
test_input_size=args.test_input_size,
input_scale=scale_model,
input_size=args.patch_size,
channels=1,
n_class=2,
test_n_class=8,
n_atlas=1,
n_subtypes=(2, 1),
method=args.method,
features_root=args.features_root,
normalizer=args.normalizer,
num_down_blocks=args.num_down_blocks,
dropout_type=args.dropout_type,
ddf_levels=args.ddf_levels,
diffeomorphism=args.diffeomorphism,
int_steps=args.int_steps,
cost_kwargs={'cost_name': args.cost_function,
'regularizer': [None, 'bending_energy'],
'regularization_coefficient': [0., 1.]})
# add number of negative Jacobians
BendingEnergy = losses.LocalDisplacementEnergy('bending')
jacobian_det = BendingEnergy.compute_jacobian_determinant(net.ddf)
num_neg_jacob = tf.math.count_nonzero(tf.less_equal(jacobian_det, 0), dtype=tf.float32,
name='negative_jacobians_number')
setattr(net, 'num_neg_jacob', num_neg_jacob)
# remove duplication of names
frame_index = utils.remove_duplicates([os.path.split(pair_names[0])[-1]
for pair_names in test_data_provider.target_atlas_image_names])
frame_columns = utils.remove_duplicates([os.path.split(pair_names[1][0])[-1][-39:]
for pair_names in test_data_provider.target_atlas_image_names])
# list the metrics that need saving
metrics_to_save = {'Dice': np.empty([len(frame_index), len(frame_columns)]),
'Jaccard': np.empty([len(frame_index), len(frame_columns)]),
'Myocardial Dice': np.empty([len(frame_index), len(frame_columns)]),
'LA Dice': np.empty([len(frame_index), len(frame_columns)]),
'LV Dice': np.empty([len(frame_index), len(frame_columns)]),
'RA Dice': np.empty([len(frame_index), len(frame_columns)]),
'RV Dice': np.empty([len(frame_index), len(frame_columns)]),
'AO Dice': np.empty([len(frame_index), len(frame_columns)]),
'PA Dice': np.empty([len(frame_index), len(frame_columns)]),
'# Negative Jacobians': np.empty([len(frame_index), len(frame_columns)]),
}
        with tf.Session(config=config) as sess:
            # Initialize variables
            sess.run(tf.global_variables_initializer())

            # Restore model parameters from previously saved model
            net.restore(sess, model_path, var_list=net.variables_to_restore)

            for idx, name in enumerate(test_data_provider.target_atlas_image_names):
                target_name = os.path.split(test_data_provider.target_atlas_image_names[idx][0])[-1]
                atlas_names = '-*-'.join([os.path.split(atlas_name)[-1] for atlas_name in
                                          test_data_provider.target_atlas_image_names[idx][1]])

                if args.space == 'commonspace1':
                    assert os.path.split(
                        test_model_data_provider.target_atlas_image_names[idx][0])[-1] == target_name.replace(
                        'commonspace1', 'commonspace2')
                    assert '-*-'.join(
                        [os.path.split(atlas_name)[-1]
                         for atlas_name in
                         test_model_data_provider.target_atlas_image_names[idx][1]]) == atlas_names.replace(
                        'commonspace1', 'commonspace2')
                elif args.space == 'commonspace2':
                    assert os.path.split(test_model_data_provider.target_atlas_image_names[idx][0])[-1] == target_name
                    assert '-*-'.join(
                        [os.path.split(atlas_name)[-1]
                         for atlas_name in test_model_data_provider.target_atlas_image_names[idx][1]]) == atlas_names

                logging.info("Fixed image: Target {:}, "
                             "Moving image: Atlas {:}".format(target_name, atlas_names))

                # load data for network input
                model_data = test_model_data_provider[idx]
                # print(model_data['atlases_label'].shape, model_data['atlases_label'].dtype)

                # load data for label propagation and result evaluation
                test_data = test_data_provider[idx]

                # perform atlas transformation
                warped_atlas_image, warped_atlas_label, warped_atlas_weight, \
                    ddf, metrics = net.predict_scale(sess, model_data, test_data, args.dropout)

                # save metrics for the current target-atlas pair
                for k, v in metrics_to_save.items():
                    v[idx // len(frame_columns), idx % len(frame_columns)] = metrics[k]

                # save output into Nifty files
                # utils.save_prediction_nii(warped_atlas_image.squeeze(0), save_path, test_data_provider,
                #                           data_type='image', name_index=idx,
                #                           affine=test_data['target_affine'], header=test_data['target_header'],
                #                           save_suffix=args.image_suffix, stage=args.reg_stage,
                #                           # original_size=args.original_size
                #                           )
                utils.save_prediction_nii(warped_atlas_label.squeeze(0), save_path, test_data_provider,
                                          data_type='label', name_index=idx,
                                          affine=test_data['target_affine'], header=test_data['target_header'],
                                          save_suffix=args.label_suffix, stage=args.reg_stage,
                                          # original_size=args.original_size
                                          )
                if args.weight_suffix:
                    utils.save_prediction_nii(warped_atlas_weight.squeeze(0), save_path, test_data_provider,
                                              data_type='image', name_index=idx,
                                              affine=test_data['target_affine'], header=test_data['target_header'],
                                              save_suffix=args.weight_suffix, save_dtype=np.float32,
                                              squeeze_channel=False,
                                              stage=args.reg_stage,
                                              # original_size=args.original_size
                                              )
                if args.save_ddf:
                    utils.save_prediction_nii(ddf.squeeze((0, -2)), save_path, test_data_provider,
                                              data_type='vector_fields', name_index=idx,
                                              affine=test_data['target_affine'], header=test_data['target_header'],
                                              stage=args.reg_stage, original_size=args.original_size)

            # save metrics into DataFrames
            metrics_DataFrames = {}
            for k, v in metrics_to_save.items():
                metrics_DataFrames[k] = pd.DataFrame(v, index=frame_index, columns=frame_columns, dtype=np.float32)

            # save metrics into excel files
            with pd.ExcelWriter(metrics_path) as writer:
                for k, v in metrics_DataFrames.items():
                    v.to_excel(writer, sheet_name=k)
Error message:
(tensor_py37) E:\pycharm_space\20210319MvMM-RegNet-master\src_3d>python save_prediction_pairwise.py --cuda_device 0 --atlas_search_path ../../../dataset/training_mr_20_commonspace2/*.nii.gz --model_path E:\\pycharm_space\\20210317MvMM-RegNet-master\\src_3d\\baselines\\baseline_ecc
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:516: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
_np_qint8 = np.dtype([("qint8", np.int8, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:517: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
_np_quint8 = np.dtype([("quint8", np.uint8, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:518: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
_np_qint16 = np.dtype([("qint16", np.int16, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:519: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
_np_quint16 = np.dtype([("quint16", np.uint16, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:520: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
_np_qint32 = np.dtype([("qint32", np.int32, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\dtypes.py:525: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as
(type, (1,)) / '(1,)type'.
np_resource = np.dtype([("resource", np.ubyte, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:541: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
_np_qint8 = np.dtype([("qint8", np.int8, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:542: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
_np_quint8 = np.dtype([("quint8", np.uint8, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:543: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
_np_qint16 = np.dtype([("qint16", np.int16, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:544: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
_np_quint16 = np.dtype([("quint16", np.uint16, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:545: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
_np_qint32 = np.dtype([("qint32", np.int32, 1)])
D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorboard\compat\tensorflow_stub\dtypes.py:550: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be underst
ood as (type, (1,)) / '(1,)type'.
np_resource = np.dtype([("resource", np.ubyte, 1)])
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:28: The name tf.ConfigProto is deprecated. Please use tf.compat.v1.ConfigProto instead.
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\layers.py:124: The name tf.initializers.he_uniform is deprecated. Please use tf.compat.v1.initializers.he_uniform instead.
Current working directory: E:\pycharm_space\20210319MvMM-RegNet-master\src_3d
Working directory changed to: E:\pycharm_space\20210319MvMM-RegNet-master
model_dir
E:\\pycharm_space\\20210317MvMM-RegNet-master\\src_3d\\baselines\\baseline_ecc
args.latest_filename
best_checkpoint
ckpt
model_checkpoint_path: "E:\\\\pycharm_space\\\\20210317MvMM-RegNet-master\\\\src_3d\\\\baselines\\\\baseline_ecc\\./best_model.ckpt"
all_model_checkpoint_paths: "E:\\\\pycharm_space\\\\20210317MvMM-RegNet-master\\\\src_3d\\\\baselines\\\\baseline_ecc\\./best_model.ckpt"
2021-03-19 11:14:25,751 Number of atlases for each target: 1
2021-03-19 11:14:25,755 Number of targets: 40, loaded from directory: ../../../dataset/test_mr_40_commonspace2/*.nii.gz
2021-03-19 11:14:25,755 Number of atlases: 20, loaded from directory: ../../../dataset/training_mr_20_commonspace2/*.nii.gz
2021-03-19 11:14:25,755 Number of available atlases combinations for each target: 20
2021-03-19 11:14:25,757 Number of atlases for each target: 1
2021-03-19 11:14:25,760 Number of targets: 40, loaded from directory: ../../../dataset/test_mr_40_commonspace2/*.nii.gz
2021-03-19 11:14:25,760 Number of atlases: 20, loaded from directory: ../../../dataset/training_mr_20_commonspace2/*.nii.gz
2021-03-19 11:14:25,760 Number of available atlases combinations for each target: 20
2021-03-19 11:14:25,761 Number of target-atlas pairs: 800
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:94: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.
2021-03-19 11:14:25,773 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:94: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C4F4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C4F4C8>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:25,818 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C4F4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C4F4C8>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1012: The name tf.Dimension is deprecated. Please use tf.compat.v1.Dimension instead.
2021-03-19 11:14:25,860 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1012: The name tf.Dimension is deprecated. Please use tf.compat.v1.Dimension instead.
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C24708>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C24708>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:26,243 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C24708>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C24708>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C83908>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C83908>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:26,553 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C83908>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E19C83908>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:125: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead.
2021-03-19 11:14:26,931 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:125: The name tf.variable_scope is deprecated. Please use tf.compat.v1.variable_scope instead.
32
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1A9C2188>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1A9C2188>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:27,671 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1A9C2188>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1A9C2188>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1340: The name tf.log is deprecated. Please use tf.math.log instead.
2021-03-19 11:14:28,049 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1340: The name tf.log is deprecated. Please use tf.math.log instead.
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1320: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.where in 2.0, which has the same broadcast rule as np.where
2021-03-19 11:14:28,070 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\utils.py:1320: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.where in 2.0, which has the same broadcast rule as np.where
WARNING:tensorflow:Entity <bound method Resize.call of <core.layers.Resize object at 0x0000013E1A99EE48>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When filing the bug, set the verb
osity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method Resize.call of <core.layers.Resize object at 0x0000013E1A99EE48>>: AttributeError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:29,940 Entity <bound method Resize.call of <core.layers.Resize object at 0x0000013E1A99EE48>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When filing the bug, set the
verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method Resize.call of <core.layers.Resize object at 0x0000013E1A99EE48>>: AttributeError: module 'gast' has no attribute
'Num'
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:30,234 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:30,505 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When fili
ng the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>: Attr
ibuteError: module 'gast' has no attribute 'Num'
2021-03-19 11:14:30,653 Entity <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>> could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When
filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting <bound method SpatialTransformer.call of <core.layers.SpatialTransformer object at 0x0000013E1C7AC4C8>>:
AttributeError: module 'gast' has no attribute 'Num'
WARNING:tensorflow:From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:371: The name tf.train.Saver is deprecated. Please use tf.compat.v1.train.Saver instead.
2021-03-19 11:14:31,495 From E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:371: The name tf.train.Saver is deprecated. Please use tf.compat.v1.train.Saver instead.
WARNING:tensorflow:From D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py:1276: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a
future version.
Instructions for updating:
Use standard file APIs to check for files with this prefix.
2021-03-19 11:14:31,664 From D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py:1276: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed
in a future version.
Instructions for updating:
Use standard file APIs to check for files with this prefix.
INFO:tensorflow:Restoring parameters from E:\\pycharm_space\\20210317MvMM-RegNet-master\\src_3d\\baselines\\baseline_ecc\./best_model.ckpt
2021-03-19 11:14:31,668 Restoring parameters from E:\\pycharm_space\\20210317MvMM-RegNet-master\\src_3d\\baselines\\baseline_ecc\./best_model.ckpt
Traceback (most recent call last):
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1356, in _do_call
return fn(*args)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1341, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1429, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.NotFoundError: Key network/unet/down_conv_0/conv_block_layer0/conv/kernel not found in checkpoint
[[{{node save/RestoreV2}}]]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 1286, in restore
{self.saver_def.filename_tensor_name: save_path})
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 950, in run
run_metadata_ptr)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1173, in _run
feed_dict_tensor, options, run_metadata)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1350, in _do_run
run_metadata)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\client\session.py", line 1370, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.NotFoundError: Key network/unet/down_conv_0/conv_block_layer0/conv/kernel not found in checkpoint
[[node save/RestoreV2 (defined at E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:371) ]]
Original stack trace for 'save/RestoreV2':
File "save_prediction_pairwise.py", line 247, in <module>
net.restore(sess, model_path, var_list=net.variables_to_restore)
File "E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py", line 371, in restore
saver = tf.train.Saver(**kwargs)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 825, in __init__
self.build()
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 837, in build
self._build(self._filename, build_save=True, build_restore=True)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 875, in _build
build_restore=build_restore)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 508, in _build_internal
restore_sequentially, reshape)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 328, in _AddRestoreOps
restore_sequentially)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 575, in bulk_restore
return io_ops.restore_v2(filename_tensor, names, slices, dtypes)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\ops\gen_io_ops.py", line 1779, in restore_v2
name=name)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\ops.py", line 3616, in create_op
op_def=op_def)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\ops.py", line 2005, in __init__
self._traceback = tf_stack.extract_stack()
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 1296, in restore
names_to_keys = object_graph_key_mapping(save_path)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 1614, in object_graph_key_mapping
object_graph_string = reader.get_tensor(trackable.OBJECT_GRAPH_PROTO_KEY)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\pywrap_tensorflow_internal.py", line 678, in get_tensor
return CheckpointReader_GetTensor(self, compat.as_bytes(tensor_str))
tensorflow.python.framework.errors_impl.NotFoundError: Key _CHECKPOINTABLE_OBJECT_GRAPH not found in checkpoint
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "save_prediction_pairwise.py", line 247, in <module>
net.restore(sess, model_path, var_list=net.variables_to_restore)
File "E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py", line 372, in restore
saver.restore(sess, model_path)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 1302, in restore
err, "a Variable name or other graph key that is missing")
tensorflow.python.framework.errors_impl.NotFoundError: Restoring from checkpoint failed. This is most likely due to a Variable name or other graph key that is missing from the checkpoint. Please ensure that you have not altered the graph expected based on the checkpoint. Original error:
Key network/unet/down_conv_0/conv_block_layer0/conv/kernel not found in checkpoint
[[node save/RestoreV2 (defined at E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py:371) ]]
Original stack trace for 'save/RestoreV2':
File "save_prediction_pairwise.py", line 247, in <module>
net.restore(sess, model_path, var_list=net.variables_to_restore)
File "E:\pycharm_space\20210319MvMM-RegNet-master\src_3d\core\model_ddf_mvmm_label_base.py", line 371, in restore
saver = tf.train.Saver(**kwargs)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 825, in __init__
self.build()
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 837, in build
self._build(self._filename, build_save=True, build_restore=True)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 875, in _build
build_restore=build_restore)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 508, in _build_internal
restore_sequentially, reshape)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 328, in _AddRestoreOps
restore_sequentially)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\training\saver.py", line 575, in bulk_restore
return io_ops.restore_v2(filename_tensor, names, slices, dtypes)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\ops\gen_io_ops.py", line 1779, in restore_v2
name=name)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\util\deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\ops.py", line 3616, in create_op
op_def=op_def)
File "D:\software\Anaconda3\envs\tensor_py37\lib\site-packages\tensorflow\python\framework\ops.py", line 2005, in __init__
self._traceback = tf_stack.extract_stack()
This problem has been bothering me for a long time. Could someone please take a look and help me figure out what is going on?
OP, please paste your code.
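For what it's worth, here is a quick way to narrow this down (a diagnostic sketch only, assuming TensorFlow 1.x and the checkpoint path shown in the restore log above). The NotFoundError means the graph built at prediction time expects a variable named network/unet/down_conv_0/conv_block_layer0/conv/kernel, but no tensor with that key exists in best_model.ckpt. That typically happens when the prediction-time network flags (for example --method, --features_root, --normalizer, --num_down_blocks) do not match the settings the checkpoint was trained with, or when the checkpoint belongs to a different model altogether. Listing the variable names actually stored in the checkpoint makes any mismatch visible:

import tensorflow as tf

# Path copied from the restore log above; adjust it to wherever your checkpoint actually lives.
ckpt_path = r"E:\pycharm_space\20210317MvMM-RegNet-master\src_3d\baselines\baseline_ecc\best_model.ckpt"

# Print every (name, shape) pair saved in the checkpoint file.
for name, shape in tf.train.list_variables(ckpt_path):
    print(name, shape)

If none of the printed names start with network/unet/..., the checkpoint and the prediction-time configuration do not describe the same architecture, which is exactly what tf.train.Saver is complaining about.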