# --- Vision-transformer / training configuration defaults ---
# NOTE(review): this chunk appears to be flattened from a config-defaults
# section; the names below are read elsewhere, so do not rename them.
transformer_patchsize_x=None  # Patch size of vision transformer patches in the x direction.
transformer_patchsize_y=None  # Patch size of vision transformer patches in the y direction.
transformer_num_patches_xy=None  # Number of transformer patches in the x and y directions, respectively.
transformer_projection_dim=64  # Transformer projection dimension. Default is 64.
transformer_mlp_head_units=[128,64]  # Transformer MLP (multilayer perceptron) head units. Default is [128, 64].
transformer_layers=8  # Number of transformer layers. Default is 8.
transformer_num_heads=4  # Number of transformer attention heads. Default is 4.
transformer_cnn_first=True  # Two vision-transformer variants exist: CNN-then-transformer, or transformer-then-CNN. True means the CNN is applied before the transformer. Default is True.
index_start=0  # Index of the model to continue training from. E.g. after 3 epochs (last index 2), set to 3 so the next saved model is model_3.h5.
dir_of_start_model=''  # Directory containing a pretrained encoder to continue training from.
is_loss_soft_dice=False  # Use soft dice as the loss function. When True, "weighted_loss" must be False.
##print("Error: transformer num patches error. Parameter transformer_num_patches_xy should be set to (input_width/32) = {} and (input_height/32) = {}".format(int(input_width / 32), int(input_height / 32)) )
##sys.exit(1)
#if not (transformer_patchsize == 1):
#print("Error: transformer patchsize error. Parameter transformer_patchsize should be set to 1" )
# Configuration-validation error messages.
# NOTE(review): in the full file these prints presumably sit in separate
# if-branches (the *32 variant applies to the CNN-first transformer); here
# they are emitted in the original order, verbatim.
_validation_errors = (
    "Error: transformer_patchsize_y or transformer_num_patches_xy height value error . input_height should be equal to ( transformer_num_patches_xy height value * transformer_patchsize_y * 32)",
    "Error: transformer_projection_dim error. The remainder when parameter transformer_projection_dim is divided by (transformer_patchsize_y*transformer_patchsize_x) should be zero",
    "Error: transformer_patchsize_y or transformer_num_patches_xy height value error . input_height should be equal to ( transformer_num_patches_xy height value * transformer_patchsize_y)",
    "Error: transformer_patchsize_x or transformer_num_patches_xy width value error . input_width should be equal to ( transformer_num_patches_xy width value * transformer_patchsize_x)",
    "Error: transformer_projection_dim error. The remainder when parameter transformer_projection_dim is divided by (transformer_patchsize_y*transformer_patchsize_x) should be zero",
)
for _msg in _validation_errors:
    print(_msg)