diff --git a/GUI_main.py b/GUI_main.py
index 21f14d1e553376540e168390c8f14529b27c81b9..ad08c1a93a11744283079f39980621c09507e727 100644
--- a/GUI_main.py
+++ b/GUI_main.py
@@ -675,7 +675,8 @@ class App(QMainWindow):
                         seg_val = int(dlg.entry_segmentation.text())
                     else:
                         seg_val = 10
-                    self.PredThreshSeg(t, dlg.listfov.row(item), thr_val, seg_val)
+                    self.PredThreshSeg(t, dlg.listfov.row(item), thr_val, seg_val,
+                                       dlg.pc_checkbox.isChecked())
                     
                     # apply tracker if wanted and if not at first time
                     if dlg.tracking_checkbox.isChecked():
@@ -690,7 +691,8 @@ class App(QMainWindow):
         self.EnableCNNButtons()
 
     
-    def PredThreshSeg(self, timeindex, fovindex, thr_val, seg_val):
+    def PredThreshSeg(self, timeindex, fovindex, thr_val, seg_val,
+                      is_pc):
           """
           This function is called in the LaunchBatchPrediction function.
           This function calls the neural network function in the
@@ -700,18 +702,23 @@ class App(QMainWindow):
           segmentation. 
           """
           im = self.reader.LoadOneImage(timeindex, fovindex)
-          pred = self.LaunchPrediction(im)
+          pred = self.LaunchPrediction(im, is_pc)
           thresh = self.ThresholdPred(thr_val, pred)
           seg = segment(thresh, pred, seg_val)
           self.reader.SaveMask(timeindex, fovindex, seg)
           
-    def LaunchPrediction(self, im):
+    def LaunchPrediction(self, im, is_pc):
         """It launches the neural neutwork on the current image and creates 
         an hdf file with the prediction for the time T and corresponding FOV. 
         """
-        im = skimage.exposure.equalize_adapthist(im)
-        im = im*1.0;	
-        pred = nn.prediction(im)
+        # NOTE(review): the if/else on is_pc had byte-identical branches, so
+        # it was collapsed; runtime behavior is unchanged. The is_pc flag is
+        # kept in the signature for when phase-contrast preprocessing is
+        # actually differentiated from brightfield.
+        im = skimage.exposure.equalize_adapthist(im)
+        # multiply by 1.0 to force a float image, as the original code did
+        im = im * 1.0
+        pred = nn.prediction(im)
         return pred
 
 
@@ -1222,7 +1229,7 @@ class App(QMainWindow):
             self.id3 = self.m.mpl_connect('button_release_event', self.m.ReleaseClick)
 
             pixmap = QtGui.QPixmap('./icons/brush2.png')
-            cursor = QtGui.QCursor(pixmap, -1,-1)
+            cursor = QtGui.QCursor(pixmap, 0,9)
             QApplication.setOverrideCursor(cursor)
         
         elif self.button_eraser.isChecked():
@@ -1237,7 +1244,7 @@ class App(QMainWindow):
             self.id3 = self.m.mpl_connect('button_release_event', self.m.ReleaseClick)
             
             pixmap = QtGui.QPixmap('./icons/eraser.png')
-            cursor = QtGui.QCursor(pixmap, -1, -1)
+            cursor = QtGui.QCursor(pixmap, 5, 24)
             QApplication.setOverrideCursor(cursor)
             
         else:
diff --git a/unet/LaunchBatchPrediction.py b/unet/LaunchBatchPrediction.py
index 0176bab6fff49f5249f01d2b837253526f4fa000..ad1f22466f3fdc4fe0c4fd93b20f29d6572e5a5e 100644
--- a/unet/LaunchBatchPrediction.py
+++ b/unet/LaunchBatchPrediction.py
@@ -42,6 +42,9 @@ class CustomDialog(QDialog):
         self.tracking_checkbox = QCheckBox()
         self.tracking_checkbox.setChecked(True)
         
+        self.pc_checkbox = QCheckBox()
+        self.pc_checkbox.setChecked(True)
+        
         flo = QFormLayout()
         flo.addWidget(self.labeltime)
         flo.addRow('Lower Boundary for time axis', self.entry1)
@@ -50,6 +53,7 @@ class CustomDialog(QDialog):
         flo.addRow('Enter a threshold value', self.entry_threshold)
         flo.addRow('Enter a segmentation value', self.entry_segmentation)
         flo.addRow('Apply Cell Tracker', self.tracking_checkbox)
+        flo.addRow('Image is Phase Contrast', self.pc_checkbox)
         
         QBtn = QDialogButtonBox.Ok | QDialogButtonBox.Cancel