Thank you for sharing this code.
I'm a bit puzzled by this error. My training images are the same training set referenced in the article, and their shape is (216, 256) — yet, as the summary below shows, the input layer is (None, 216, 256, 1) while the final output is (None, 208, 256, 2), so the height has shrunk from 216 to 208. I can't see anything in the down-sampling code that changes the dimensions, but I must be missing something.
input_1 (InputLayer) (None, 216, 256, 1) 0
conv2d_1 (Conv2D) (None, 216, 256, 64) 640 input_1[0][0]
batch_normalization_1 (BatchNorm (None, 216, 256, 64) 256 conv2d_1[0][0]
activation_1 (Activation) (None, 216, 256, 64) 0 batch_normalization_1[0][0]
conv2d_2 (Conv2D) (None, 216, 256, 64) 36928 activation_1[0][0]
batch_normalization_2 (BatchNorm (None, 216, 256, 64) 256 conv2d_2[0][0]
activation_2 (Activation) (None, 216, 256, 64) 0 batch_normalization_2[0][0]
max_pooling2d_1 (MaxPooling2D) (None, 108, 128, 64) 0 activation_2[0][0]
conv2d_3 (Conv2D) (None, 108, 128, 128) 73856 max_pooling2d_1[0][0]
batch_normalization_3 (BatchNorm (None, 108, 128, 128) 512 conv2d_3[0][0]
activation_3 (Activation) (None, 108, 128, 128) 0 batch_normalization_3[0][0]
conv2d_4 (Conv2D) (None, 108, 128, 128) 147584 activation_3[0][0]
batch_normalization_4 (BatchNorm (None, 108, 128, 128) 512 conv2d_4[0][0]
activation_4 (Activation) (None, 108, 128, 128) 0 batch_normalization_4[0][0]
max_pooling2d_2 (MaxPooling2D) (None, 54, 64, 128) 0 activation_4[0][0]
conv2d_5 (Conv2D) (None, 54, 64, 256) 295168 max_pooling2d_2[0][0]
batch_normalization_5 (BatchNorm (None, 54, 64, 256) 1024 conv2d_5[0][0]
activation_5 (Activation) (None, 54, 64, 256) 0 batch_normalization_5[0][0]
conv2d_6 (Conv2D) (None, 54, 64, 256) 590080 activation_5[0][0]
batch_normalization_6 (BatchNorm (None, 54, 64, 256) 1024 conv2d_6[0][0]
activation_6 (Activation) (None, 54, 64, 256) 0 batch_normalization_6[0][0]
max_pooling2d_3 (MaxPooling2D) (None, 27, 32, 256) 0 activation_6[0][0]
conv2d_7 (Conv2D) (None, 27, 32, 512) 1180160 max_pooling2d_3[0][0]
batch_normalization_7 (BatchNorm (None, 27, 32, 512) 2048 conv2d_7[0][0]
activation_7 (Activation) (None, 27, 32, 512) 0 batch_normalization_7[0][0]
conv2d_8 (Conv2D) (None, 27, 32, 512) 2359808 activation_7[0][0]
batch_normalization_8 (BatchNorm (None, 27, 32, 512) 2048 conv2d_8[0][0]
activation_8 (Activation) (None, 27, 32, 512) 0 batch_normalization_8[0][0]
max_pooling2d_4 (MaxPooling2D) (None, 13, 16, 512) 0 activation_8[0][0]
conv2d_9 (Conv2D) (None, 13, 16, 1024) 4719616 max_pooling2d_4[0][0]
batch_normalization_9 (BatchNorm (None, 13, 16, 1024) 4096 conv2d_9[0][0]
activation_9 (Activation) (None, 13, 16, 1024) 0 batch_normalization_9[0][0]
conv2d_10 (Conv2D) (None, 13, 16, 1024) 9438208 activation_9[0][0]
batch_normalization_10 (BatchNor (None, 13, 16, 1024) 4096 conv2d_10[0][0]
activation_10 (Activation) (None, 13, 16, 1024) 0 batch_normalization_10[0][0]
conv2d_transpose_1 (Conv2DTransp (None, 26, 32, 512) 2097664 activation_10[0][0]
cropping2d_1 (Cropping2D) (None, 26, 32, 512) 0 activation_8[0][0]
concatenate_1 (Concatenate) (None, 26, 32, 1024) 0 conv2d_transpose_1[0][0]
cropping2d_1[0][0]
conv2d_11 (Conv2D) (None, 26, 32, 512) 4719104 concatenate_1[0][0]
batch_normalization_11 (BatchNor (None, 26, 32, 512) 2048 conv2d_11[0][0]
activation_11 (Activation) (None, 26, 32, 512) 0 batch_normalization_11[0][0]
conv2d_12 (Conv2D) (None, 26, 32, 512) 2359808 activation_11[0][0]
batch_normalization_12 (BatchNor (None, 26, 32, 512) 2048 conv2d_12[0][0]
activation_12 (Activation) (None, 26, 32, 512) 0 batch_normalization_12[0][0]
conv2d_transpose_2 (Conv2DTransp (None, 52, 64, 256) 524544 activation_12[0][0]
cropping2d_2 (Cropping2D) (None, 52, 64, 256) 0 activation_6[0][0]
concatenate_2 (Concatenate) (None, 52, 64, 512) 0 conv2d_transpose_2[0][0]
cropping2d_2[0][0]
conv2d_13 (Conv2D) (None, 52, 64, 256) 1179904 concatenate_2[0][0]
batch_normalization_13 (BatchNor (None, 52, 64, 256) 1024 conv2d_13[0][0]
activation_13 (Activation) (None, 52, 64, 256) 0 batch_normalization_13[0][0]
conv2d_14 (Conv2D) (None, 52, 64, 256) 590080 activation_13[0][0]
batch_normalization_14 (BatchNor (None, 52, 64, 256) 1024 conv2d_14[0][0]
activation_14 (Activation) (None, 52, 64, 256) 0 batch_normalization_14[0][0]
conv2d_transpose_3 (Conv2DTransp (None, 104, 128, 128) 131200 activation_14[0][0]
cropping2d_3 (Cropping2D) (None, 104, 128, 128) 0 activation_4[0][0]
concatenate_3 (Concatenate) (None, 104, 128, 256) 0 conv2d_transpose_3[0][0]
cropping2d_3[0][0]
conv2d_15 (Conv2D) (None, 104, 128, 128) 295040 concatenate_3[0][0]
batch_normalization_15 (BatchNor (None, 104, 128, 128) 512 conv2d_15[0][0]
activation_15 (Activation) (None, 104, 128, 128) 0 batch_normalization_15[0][0]
conv2d_16 (Conv2D) (None, 104, 128, 128) 147584 activation_15[0][0]
batch_normalization_16 (BatchNor (None, 104, 128, 128) 512 conv2d_16[0][0]
activation_16 (Activation) (None, 104, 128, 128) 0 batch_normalization_16[0][0]
conv2d_transpose_4 (Conv2DTransp (None, 208, 256, 64) 32832 activation_16[0][0]
cropping2d_4 (Cropping2D) (None, 208, 256, 64) 0 activation_2[0][0]
concatenate_4 (Concatenate) (None, 208, 256, 128) 0 conv2d_transpose_4[0][0]
cropping2d_4[0][0]
conv2d_17 (Conv2D) (None, 208, 256, 64) 73792 concatenate_4[0][0]
batch_normalization_17 (BatchNor (None, 208, 256, 64) 256 conv2d_17[0][0]
activation_17 (Activation) (None, 208, 256, 64) 0 batch_normalization_17[0][0]
conv2d_18 (Conv2D) (None, 208, 256, 64) 36928 activation_17[0][0]
batch_normalization_18 (BatchNor (None, 208, 256, 64) 256 conv2d_18[0][0]
activation_18 (Activation) (None, 208, 256, 64) 0 batch_normalization_18[0][0]
conv2d_19 (Conv2D) (None, 208, 256, 2) 130 activation_18[0][0]
lambda_1 (Lambda) (None, 208, 256, 2) 0 conv2d_19[0][0]
activation_19 (Activation) (None, 208, 256, 2) 0 lambda_1[0][0]