Skip to content
Snippets Groups Projects
Commit cd6513ae authored by pjm363 (Philip Monaco)'s avatar pjm363 (Philip Monaco)
Browse files

Merge branch '13-update-eda' into 'main'

Resolve "Update EDA"

Closes #13

See merge request !10
parents 54aa6182 6591fcc3
No related branches found
No related tags found
1 merge request!10Resolve "Update EDA"
...@@ -23,7 +23,7 @@ def find_mean_img(full_mat): ...@@ -23,7 +23,7 @@ def find_mean_img(full_mat):
# calculate the average # calculate the average
mean_img = np.mean(full_mat[mat], axis = 0) mean_img = np.mean(full_mat[mat], axis = 0)
# reshape it back to a matrix # reshape it back to a matrix
mean_img = mean_img.reshape((200, 150)) mean_img = mean_img.reshape((170, 120))
ax = fig.add_subplot(rows, cols,i+1) ax = fig.add_subplot(rows, cols,i+1)
ax.imshow(mean_img, vmin=0, vmax=255, cmap='Greys_r') ax.imshow(mean_img, vmin=0, vmax=255, cmap='Greys_r')
ax.set_title('Average ' + mat) ax.set_title('Average ' + mat)
...@@ -31,7 +31,7 @@ def find_mean_img(full_mat): ...@@ -31,7 +31,7 @@ def find_mean_img(full_mat):
plt.tight_layout() plt.tight_layout()
def plot_pca(pca, title, size = (200, 150)): def plot_pca(pca, title, size = (170, 120)):
"""Plots each decomposed PCA image and labels the amount of variability for each image. """Plots each decomposed PCA image and labels the amount of variability for each image.
Args: Args:
...@@ -57,7 +57,7 @@ def plot_pca(pca, title, size = (200, 150)): ...@@ -57,7 +57,7 @@ def plot_pca(pca, title, size = (200, 150)):
plt.show() plt.show()
def eigenimages(full_mat,n_comp = 0.7, size = (200, 150)): def eigenimages(full_mat,n_comp = 0.7, size = (170, 120)):
"""Creates creates and fits a PCA estimator from sklearn. """Creates creates and fits a PCA estimator from sklearn.
Args: Args:
...@@ -69,6 +69,7 @@ def eigenimages(full_mat,n_comp = 0.7, size = (200, 150)): ...@@ -69,6 +69,7 @@ def eigenimages(full_mat,n_comp = 0.7, size = (200, 150)):
sklearn PCA object: Fitted PCA model. sklearn PCA object: Fitted PCA model.
""" """
# fit PCA to describe n_comp * variability in the class # fit PCA to describe n_comp * variability in the class
print(full_mat, n_comp)
pca = PCA(n_components = n_comp, whiten = True) pca = PCA(n_components = n_comp, whiten = True)
pca.fit(full_mat) pca.fit(full_mat)
......
This diff is collapsed.
...@@ -2,7 +2,9 @@ import os ...@@ -2,7 +2,9 @@ import os
import shutil import shutil
import pandas as pd import pandas as pd
import tensorflow as tf import tensorflow as tf
from PIL import Image
from tensorflow.keras.preprocessing import image from tensorflow.keras.preprocessing import image
from tensorflow.keras.layers import CenterCrop
import numpy as np import numpy as np
from tqdm import tqdm from tqdm import tqdm
...@@ -36,16 +38,23 @@ def load_sort_data(meta_filename = str, image_folder = str, output_folder = str) ...@@ -36,16 +38,23 @@ def load_sort_data(meta_filename = str, image_folder = str, output_folder = str)
return metadata, dest_dir return metadata, dest_dir
def transform(path, size = (300, 225)): def transform(path, size = (200, 150)):
# create a list of images # create a list of images
img_list = [fn for fn in os.listdir(path) if fn.endswith('.jpg')] img_list = [fn for fn in os.listdir(path) if fn.endswith('.jpg')]
(left, upper, right, lower) = (15, 15, 135,185)
#iterating over each .jpg #iterating over each .jpg
for fn in tqdm(img_list): for fn in tqdm(img_list):
fp = path + '/' + fn fp = path + '/' + fn
current_image = image.load_img(fp, target_size = size, current_image = image.load_img(fp, target_size = size,
color_mode = 'grayscale') color_mode = 'grayscale')
# covert image to a matrix crop_image = current_image.crop((left, upper, right, lower))
img_ts = image.img_to_array(current_image)
# print("current image", type(current_image), current_image)
# print("crop image", type(crop_image), crop_image)
# convert image to a matrix
img_ts = image.img_to_array(crop_image)
# print(type(img_ts), img_ts)
# turn that into a vector / 1D array # turn that into a vector / 1D array
img_ts = [img_ts.ravel()] img_ts = [img_ts.ravel()]
try: try:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment