
How to create your own dataset with TensorFlow?

I am currently working on an image classifier and it's giving me a problem. I'm not sure what the problem is, but I suspect it might involve the TensorFlow API, because I am trying out feed_dict and it keeps raising an error. The following is the code for my model:

import os
import cv2
import numpy as np
from random import shuffle
from tqdm import tqdm

Train_dir = 'C:\\Users\\Desktop\\Rock_data'
Test_dir = 'C:\\Users\\Desktop\\Rock_data'
img_size = 2500
LR = 0.001
training_iterations = 2000
batch_size = 10
no_classes = 10
dropout = 0.8
display_step = 10
Model_name = 'RockId-{}-{}.model.'.format(LR, '2Conv-basic')

def label_img(img):
    # The rock type is assumed to be the last comma-separated token of the file name.
    word_label = img.split(',')[-1]
    if word_label == 'sedimentary':
        return [1, 0]
    elif word_label == 'metamorphic':
        return [0, 1]
    elif word_label == 'igneous':
        return [1, 1]

def create_train_data():
    training_data = []
    for img in tqdm(os.listdir(Train_dir)):
        label = label_img(img)
        if label is None:
            # Skip files whose name does not match any of the known classes.
            continue
        path = os.path.join(Train_dir, img)
        img = cv2.resize(cv2.imread(path, cv2.IMREAD_GRAYSCALE),
                         (img_size, img_size))
        training_data.append([np.array(img), np.array(label)])
    shuffle(training_data)
    np.save('train_data.npy', training_data)
    return training_data


def process_test_data():
    testing_data = []
    for img in tqdm(os.listdir(Test_dir)):
        label = label_img(img)
        if label is None:
            continue
        path = os.path.join(Test_dir, img)
        img = cv2.resize(cv2.imread(path, cv2.IMREAD_GRAYSCALE),
                         (img_size, img_size))
        testing_data.append([np.array(img), np.array(label)])
    shuffle(testing_data)
    np.save('testing_data.npy', testing_data)
    return testing_data


train_data = create_train_data()
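
For reference, here is a minimal sketch of feeding the data returned above into a TensorFlow 1.x graph with feed_dict. The placeholder names x and y, the flattening of each image into a vector, and the echo-style session run are assumptions for illustration only, not the actual model:

import numpy as np
import tensorflow as tf  # assumes TensorFlow 1.x; in 2.x these calls live under tf.compat.v1

# Split the [image, label] pairs produced above into separate arrays,
# flattening each grayscale image into a single row vector.
X = np.array([item[0] for item in train_data]).reshape(-1, img_size * img_size).astype(np.float32)
Y = np.array([item[1] for item in train_data]).astype(np.float32)

# Placeholders sized from the data itself (the labels above are length-2 vectors).
x = tf.placeholder(tf.float32, shape=[None, img_size * img_size])
y = tf.placeholder(tf.float32, shape=[None, Y.shape[1]])

with tf.Session() as sess:
    batch_x, batch_y = X[:batch_size], Y[:batch_size]
    # Fetching the placeholders just echoes back what was fed, which is a quick way
    # to confirm that shapes and dtypes match before wiring in the real network.
    fed_x, fed_y = sess.run([x, y], feed_dict={x: batch_x, y: batch_y})
    print(fed_x.shape, fed_y.shape)

# The saved file could also be reloaded later with
# np.load('train_data.npy', allow_pickle=True) instead of re-processing the images.

If this echo test passes but the real model still fails, the most likely culprit is a shape mismatch between the placeholders and what the convolutional layers expect (typically a 4-D [batch, height, width, channels] tensor rather than the flattened vectors used here).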
