# NOTE(review): this section of the scrape was scrambled (reversed sentence
# order, code fused with prose). Reconstructed into runnable Python; the
# surrounding prose is preserved as comments. Confirm against the upstream
# tutorial this page was scraped from.

import random

import numpy as np
import tensorflow as tf
from scipy import ndimage

# Read the paths of the CT scans from the class directories.
# Each scan is resized across height, width, and depth and rescaled.
# For the CT scans having presence of viral pneumonia assign 1,
# for the normal ones assign 0.
abnormal_scans = np.array([process_scan(path) for path in abnormal_scan_paths])
normal_scans = np.array([process_scan(path) for path in normal_scan_paths])
abnormal_labels = np.array([1 for _ in range(len(abnormal_scans))])
normal_labels = np.array([0 for _ in range(len(normal_scans))])

# Split data in the ratio 70-30 for training and validation.
x_train = np.concatenate((abnormal_scans[:70], normal_scans[:70]), axis=0)
y_train = np.concatenate((abnormal_labels[:70], normal_labels[:70]), axis=0)
x_val = np.concatenate((abnormal_scans[70:], normal_scans[70:]), axis=0)
y_val = np.concatenate((abnormal_labels[70:], normal_labels[70:]), axis=0)
print(
    "Number of samples in train and validation are %d and %d."
    % (x_train.shape[0], x_val.shape[0])
)


def rotate(volume):
    """Rotate the volume by a few degrees."""

    def scipy_rotate(volume):
        # Define some rotation angles and pick one at random.
        angles = [-20, -10, -5, 5, 10, 20]
        angle = random.choice(angles)
        # Rotate volume in-plane without changing its shape.
        volume = ndimage.rotate(volume, angle, reshape=False)
        # Training and validation data are already rescaled to values
        # between 0 and 1; clamp interpolation overshoot back into range.
        volume[volume < 0] = 0
        volume[volume > 1] = 1
        return volume

    # Wrap the scipy-based rotation so it can run inside a tf.data pipeline.
    augmented_volume = tf.numpy_function(scipy_rotate, [volume], tf.float32)
    return augmented_volume


# While defining the train and validation data loaders, the training data is
# passed through the augmentation function which randomly rotates the volume
# at different angles.
def train_preprocessing(volume, label):
    """Process training data by rotating and adding a channel."""
    volume = rotate(volume)
    volume = tf.expand_dims(volume, axis=3)
    return volume, label


def validation_preprocessing(volume, label):
    """Process validation data by only adding a channel."""
    volume = tf.expand_dims(volume, axis=3)
    return volume, label


def plot_slices(num_rows, num_columns, width, height, data):
    """Plot a montage of 20 CT slices."""
    # NOTE(review): only `data = np.` survived in the scrape; the body below
    # is reconstructed from the upstream tutorial — verify before relying on it.
    import matplotlib.pyplot as plt  # local: plotting is optional

    data = np.rot90(np.array(data))
    data = np.transpose(data)
    data = np.reshape(data, (num_rows, num_columns, width, height))
    rows_data, columns_data = data.shape[0], data.shape[1]
    heights = [slc[0].shape[0] for slc in data]
    widths = [slc.shape[1] for slc in data[0]]
    fig_width = 12.0
    fig_height = fig_width * sum(heights) / sum(widths)
    f, axarr = plt.subplots(
        rows_data,
        columns_data,
        figsize=(fig_width, fig_height),
        gridspec_kw={"height_ratios": heights},
    )
    for i in range(rows_data):
        for j in range(columns_data):
            axarr[i, j].imshow(data[i][j], cmap="gray")
            axarr[i, j].axis("off")
    plt.subplots_adjust(wspace=0, hspace=0, left=0, right=1, bottom=0, top=1)
    plt.show()
# Helper functions to process the data. Will be used when building the
# training and validation datasets.
# NOTE(review): reconstructed from a scrambled scrape (statements were in
# reversed order with broken clamping code); confirm against the upstream
# tutorial.

from scipy import ndimage


def read_nifti_file(filepath):
    """Read a NIfTI file and return its raw voxel data.

    The nibabel import is function-local so the other helpers in this
    module stay importable when nibabel is not installed.
    """
    import nibabel as nib

    scan = nib.load(filepath)
    # get_fdata() returns the image data as a floating-point numpy array.
    scan = scan.get_fdata()
    return scan


def normalize(volume):
    """Clamp the volume to [-1000, 400] and rescale it to [0, 1] float32.

    Note: the clamping mutates ``volume`` in place before rescaling.
    """
    min_val = -1000  # renamed from `min`/`max` to avoid shadowing builtins
    max_val = 400
    volume[volume < min_val] = min_val
    volume[volume > max_val] = max_val
    volume = (volume - min_val) / (max_val - min_val)
    volume = volume.astype("float32")
    return volume


def resize_volume(img):
    """Resize the volume across width, height and depth (z-axis) to
    128 x 128 x 64 voxels."""
    desired_depth = 64
    desired_width = 128
    desired_height = 128
    # Current dimensions: axes 0/1 are in-plane, the last axis is depth.
    current_depth = img.shape[-1]
    current_width = img.shape[0]
    current_height = img.shape[1]
    # Zoom factors that map current size onto the desired size.
    depth_factor = 1 / (current_depth / desired_depth)
    width_factor = 1 / (current_width / desired_width)
    height_factor = 1 / (current_height / desired_height)
    # Rotate 90 degrees in-plane; reshape=False keeps the array shape.
    img = ndimage.rotate(img, 90, reshape=False)
    # Resize with linear interpolation (order=1).
    img = ndimage.zoom(img, (width_factor, height_factor, depth_factor), order=1)
    return img


def process_scan(path):
    """Read, normalize and resize a CT scan given its file path."""
    volume = read_nifti_file(path)
    volume = normalize(volume)
    volume = resize_volume(volume)
    return volume
## Keras 3D data augmentation: installing dependencies
The files are provided in NIfTI format with the extension .nii. You can install the nibabel package via pip install nibabel.