Untitled
python
import tensorflow as tf
import numpy as np

def predict_and_calculate_bce_losses(model, images, labels):
    # Check the available GPUs
    gpus = tf.config.list_physical_devices('GPU')
    if not gpus:
        raise SystemError("No GPUs found for parallel processing")

    # Split images and labels into one chunk per GPU
    num_gpus = len(gpus)
    image_splits = np.array_split(images, num_gpus)
    label_splits = np.array_split(labels, num_gpus)

    # Initialize the BCE loss with reduction disabled so it returns
    # one loss value per sample instead of a single averaged scalar
    bce_loss_function = tf.keras.losses.BinaryCrossentropy(
        reduction=tf.keras.losses.Reduction.NONE)

    # List to store all per-sample BCE losses
    all_bce_losses = []

    # Predict and calculate BCE losses for each GPU's chunk
    # (chunks are processed one device at a time, in sequence)
    for i in range(num_gpus):
        with tf.device(f'/GPU:{i}'):
            # Predict on this split
            predictions = model.predict(image_splits[i])

            # Per-sample BCE losses for this split
            losses = bce_loss_function(label_splits[i], predictions)

            # Store the losses
            all_bce_losses.extend(losses.numpy())

    return np.array(all_bce_losses)

# Usage of the function
# model: Your pre-trained model
# images: Numpy array of your images
# labels: Numpy array of your labels
# bce_losses = predict_and_calculate_bce_losses(model, images, labels)
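A minimal usage sketch, assuming at least one GPU is visible to TensorFlow; the tiny model architecture, input shapes, and random data below are illustrative placeholders, not part of the original paste:

# Hypothetical binary classifier for 32x32 RGB images (illustration only)
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(32, 32, 3)),
    tf.keras.layers.Dense(16, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Random stand-in data: 64 images with binary labels
images = np.random.rand(64, 32, 32, 3).astype(np.float32)
labels = np.random.randint(0, 2, size=(64, 1)).astype(np.float32)

bce_losses = predict_and_calculate_bce_losses(model, images, labels)
print(bce_losses.shape)  # one loss value per input image

Because the loss is built with reduction=NONE, the returned array has one entry per input sample, which is what lets the function concatenate results across the per-GPU splits.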