#!/usr/bin/env python
# coding: utf-8

# In[2]:

import os                              # for seeing the directory
import numpy as np                     # for our math functions
import skimage.io as io                # for reading and saving images
from skimage import filters, measure, morphology   # morphological filters
from skimage.feature import peak_local_max          # function to find local maxima
from scipy import optimize, ndimage    # for curve fitting
get_ipython().run_line_magic('matplotlib', 'notebook')
import matplotlib.pyplot as plt        # for plotting
import cv2                             # image processing toolbox
import openpiv.tools                   # particle image velocimetry toolbox
import openpiv.process
import openpiv.scaling
import openpiv.validation
import openpiv.filters
import timeit                          # tool to track processing time


# In[261]:

# list that collects the average vector magnitude of each processed frame pair
mag_vect_ave = []


# In[262]:

# read in the images and mask
frame = io.imread(r'C:\Users\mdkil\Desktop\Data\PIV\Yolk\5_17_19DL\87.tif')
frame_reference = io.imread(r'C:\Users\mdkil\Desktop\Data\PIV\Yolk\5_17_19DL\88.tif')
mask2 = io.imread(r'C:\Users\mdkil\Desktop\Data\PIV\Yolk\5_17_19DL\mask.tif')

# determine the number of rows and columns
nrows, ncols = frame.shape


# In[263]:

# subtract the mean and divide by the standard deviation of each image
frame_norm = (frame - np.mean(frame)) / np.std(frame)
frame_reference_norm = (frame_reference - np.mean(frame_reference)) / np.std(frame_reference)

# Perform the cross correlation (the normalized images are float64, so a float output depth is required)
cross_corr = cv2.filter2D(frame_norm, cv2.CV_64F, frame_reference_norm)

# Divide by the number of rows and columns in the image
cross_corr = cross_corr / nrows / ncols

# Plot the cross correlation
cross_corr_fig, cross_corr_axes = plt.subplots()
cross_corr_axes.imshow(cross_corr)
cross_corr_fig.show()

# Find the max value in the cross correlation image and where it is
cross_corr_max = np.max(cross_corr)
cross_corr_pos = np.argwhere(cross_corr == cross_corr_max)
print('Maximum Value :', cross_corr_max, '\nPosition :', cross_corr_pos)

# calculate the row and column shifts for the reference image
row_shift = cross_corr_pos[0, 0] - nrows / 2
col_shift = cross_corr_pos[0, 1] - ncols / 2
print('Shift in rows :', row_shift, '\nShift in cols :', col_shift)

# shift the reference image so that it's registered to the first image
frame_reference = np.roll(frame_reference, row_shift.astype('int'), 0)
frame_reference = np.roll(frame_reference, col_shift.astype('int'), 1)


# In[264]:

# convert the images to the int32 type required by openpiv.process
image = frame.astype('int32')
image_ref = frame_reference.astype('int32')

# Perform the PIV
U, V, sig2noise = openpiv.process.extended_search_area_piv(image, image_ref,
                                                           window_size=32,
                                                           overlap=16,
                                                           dt=1,
                                                           search_area_size=36,
                                                           sig2noise_method='peak2peak')


# In[265]:

# Get the coordinates of the correlation windows
X, Y = openpiv.process.get_coordinates(image_size=frame.shape, window_size=32, overlap=16)


# In[266]:

# Find the magnitude of the displacement field
dist_mag = np.sqrt(U**2 + V**2)

# Create a histogram of the displacement magnitudes
counts_disp, bins_disp = np.histogram(dist_mag.ravel(), bins=np.arange(0, 50))
dist_fig, dist_axes = plt.subplots()
dist_axes.scatter(bins_disp[:-1], counts_disp)
dist_fig.show()


# In[267]:

# Set a threshold on the signal-to-noise ratio of the cross correlation;
# sig2noise_val replaces the low-confidence vectors with NaN and returns a boolean mask of them
U, V, mask = openpiv.validation.sig2noise_val(U, V, sig2noise, threshold=1.0)

# Plot the filtered field
vector_fil_fig, vector_fil_axes = plt.subplots()
vector_fil_axes.imshow(np.flipud(mask2), cmap='Greys')
vector_fil_axes.quiver(X, Y, U, V, color='red')
vector_fil_fig.show()
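# In[ ]:

# Optional sketch (not part of the original analysis): instead of leaving the
# low-confidence vectors as NaN, they can be interpolated from their neighbours.
# This assumes the pre-0.21 OpenPIV API, where openpiv.filters.replace_outliers
# fills NaN vectors by local averaging, as in the OpenPIV tutorial. The names
# U_filled and V_filled are new and are not used by the later cells; copies of
# U and V are passed so the arrays used below stay untouched.
U_filled, V_filled = openpiv.filters.replace_outliers(U.copy(), V.copy(),
                                                      method='localmean',
                                                      max_iter=10,
                                                      kernel_size=2)

# quick look at the interpolated field
fill_fig, fill_axes = plt.subplots()
fill_axes.quiver(X, Y, U_filled, V_filled, color='blue')
fill_fig.show()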
# In[268]:

# Here I'm removing vectors that occur outside the mask
# (loop over every window in the PIV grid rather than hard-coding its size)
mask3 = np.flipud(mask2)
for i in range(U.shape[0]):
    for j in range(U.shape[1]):
        if mask3[int(Y[i, j]), int(X[i, j])] == 255:
            U[i, j] = 0
            V[i, j] = 0


# In[269]:

# Here I'm doing another round of thresholding vectors
U, V, mask = openpiv.validation.sig2noise_val(U, V, sig2noise, threshold=1)


# In[270]:

# Here I'm removing unreasonably large vectors
maskU = np.abs(U) <= 8
U = U * maskU
maskV = np.abs(V) <= 8
V = V * maskV


# In[271]:

# Here I'm displaying the vectors which survived thresholding and mask subtraction
# Here I save the image for later display
w_fig, w_axes = plt.subplots()
w_axes.quiver(X, Y, U, V, color='black')
w_axes.imshow(np.flipud(mask2), cmap='Greys')
plt.axis('off')
w_fig.show()
#w_fig.savefig(r'C:\Users\mdkil\Desktop\Bio Pys\ORff\wholeout\9.tif', bbox_inches='tight', dpi=1000)


# In[272]:

# Here I am generating a quiver plot of the vectors, without the mask
q_fig, q_axes = plt.subplots()
q_axes.quiver(X, Y, U, V, color='green')
q_axes.imshow(np.flipud(frame), cmap='Greys')
plt.axis('off')
q_fig.show()
q_fig.savefig(r'C:\Users\mdkil\Desktop\Bio Pys\YG3ff\8_11_18\wholeout\9.tif', bbox_inches='tight', dpi=500)


# In[273]:

# Here I am calculating the vector lengths by applying the distance equation to the x and y components
V2 = V.copy()
U2 = U.copy()
D = np.sqrt(U2**2 + V2**2)


# In[274]:

# To average the magnitude of the vectors I have to remove the NaNs and 0s from the array.
# Here I'm turning the 0s into NaN so I just have to do one removal step
D[D == 0] = np.nan


# In[275]:

# Here I'm removing all the NaNs
D = D[~np.isnan(D)]
print(D)


# In[276]:

# Here I'm determining the average magnitude of the vectors
np.average(D)


# In[277]:

# add the average to the list
mag_vect_ave.append(np.average(D))
print(mag_vect_ave)


# In[278]:

# Saves the averages to a .csv
# remove the '#' when all the images have been processed
#np.savetxt(r'C:\Users\mdkil\Desktop\Bio Pys\YG3ff\8_11_18\wholeout\YG3_mag_vect_ave.csv', mag_vect_ave, delimiter=",", fmt='%s')


# In[ ]:
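# In[ ]:

# Optional sketch (not part of the original analysis): the cells above handle a
# single frame pair, and mag_vect_ave suggests the notebook is re-run once per
# pair. A minimal batching sketch, assuming the frames are consecutively
# numbered .tif files in the folder used above; process_pair() is a hypothetical
# helper that would wrap cells In[262] through In[276] and return the average
# vector magnitude. It is left commented out, like the .csv export above, until
# such a helper exists.
#frame_dir = r'C:\Users\mdkil\Desktop\Data\PIV\Yolk\5_17_19DL'
#tif_names = sorted((f for f in os.listdir(frame_dir)
#                    if f.endswith('.tif') and f != 'mask.tif'),
#                   key=lambda f: int(os.path.splitext(f)[0]))
#for name_a, name_b in zip(tif_names[:-1], tif_names[1:]):
#    mag_vect_ave.append(process_pair(os.path.join(frame_dir, name_a),
#                                     os.path.join(frame_dir, name_b),
#                                     os.path.join(frame_dir, 'mask.tif')))
#print(mag_vect_ave)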