import os
import pickle

import pandas as pd
from sklearn.preprocessing import LabelEncoder
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# Load the crop dataset and re-fit the label encoder so the crop-name <->
# integer mapping used by the trained model can be reproduced.
df = pd.read_csv('./crop.csv')
labelencoder = LabelEncoder()
df['label_cat'] = labelencoder.fit_transform(df['label'])

# Load the trained crop-recommendation model serialised with pickle.
with open('model_pickle', "rb") as f:
    model = pickle.load(f)
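
# Assumption, inferred from recommend() below: model.predict() takes
# [temperature, humidity, ph, rainfall] and returns a row whose first value is
# the encoded crop label and whose remaining values are the N, P, K estimates.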

# Headless Chrome set-up. Reference tutorials kept from the original:
# https://youtu.be/kiYmBvv94RY
# https://youtu.be/rfdNIOYGYVI
# https://youtu.be/U6gbGk5WPws
# https://youtu.be/b5jt2bhSeXs
op = webdriver.ChromeOptions()
# op.binary_location = os.environ.get("GOOGLE_CHROME_BIN")
op.binary_location = "C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe"
op.add_argument("--headless")
op.add_argument("--no-sandbox")
op.add_argument("--disable-dev-shm-usage")
op.add_experimental_option('excludeSwitches', ['enable-logging'])

# Selenium 4 style: the chromedriver path is passed through Service and the
# ChromeOptions object through options (executable_path/chrome_options are
# deprecated or removed in newer Selenium releases).
# driver = webdriver.Chrome(executable_path=os.environ.get("CHROMEDRIVER_PATH"), chrome_options=op)
service = Service(r"C:\Program Files (x86)\chromedriver_win32\chromedriver.exe")
driver = webdriver.Chrome(service=service, options=op)

# Alternative local (non-headless) set-up kept from the original:
# PATH = r"C:\Program Files (x86)\chromedriver_win32\chromedriver.exe"
# options = webdriver.ChromeOptions()
# options.add_argument("start-maximized")
# # to suppress the error messages/logs
# options.add_experimental_option('excludeSwitches', ['enable-logging'])
# driver = webdriver.Chrome(options=options, executable_path=PATH)


def recommend(temp, hum, ph, rain):
    """Return the recommended crop name and the predicted N, P, K values."""
    preds = list(model.predict([[temp, hum, ph, rain]])[0])
    npk = preds[1:]
    crop_index = round(preds[0])
    mapper = dict(zip(labelencoder.classes_,
                      range(len(labelencoder.classes_))))
    # The code for mapper is from https://stackoverflow.com/questions/42196589/any-way-to-get-mappings-of-a-label-encoder-in-python-pandas
    crop = list(mapper.keys())[list(mapper.values()).index(int(crop_index))]
    # The code for crop is from https://www.geeksforgeeks.org/python-get-key-from-value-in-dictionary/
    return crop, npk
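
# Note: the mapper-based reverse lookup above should be equivalent to
# labelencoder.inverse_transform([int(crop_index)])[0]; the explicit dict is
# kept to match the referenced StackOverflow answer.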


def get_disease(img_path):
    """Upload an image to the plant-disease web app and scrape the result."""
    driver.get(
        "https://plant-disease-detection-ai.herokuapp.com/index")
    upload_element = driver.find_element(
        By.XPATH, '//*[@id="actual-btn"]')
    upload_element.send_keys(img_path)
    submit_element = driver.find_element(
        By.XPATH,
        "/html/body/div[2]/div/div[2]/div[2]/div/form/center/a/button")
    submit_element.click()
    try:
        title_element = extract_values(
            60, "/html/body/div[3]/div/div[1]/div/h1/b")
        meta_des = extract_values(
            10, "/html/body/div[3]/div/div[2]/div/div/h5/b")
        des = extract_values(
            10, "/html/body/div[3]/div/div[2]/div/div/p")
        meta_treat = extract_values(
            10, "/html/body/div[3]/div/div[3]/div/div/h5/b")
        treat = extract_values(
            10, "/html/body/div[3]/div/div[3]/div[1]/div/p")
        return {"title": title_element,
                "meta_des": meta_des,
                "des": des,
                "meta_treat": meta_treat,
                "treat": treat}
    except Exception:
        # Kept from the original behaviour: if the result elements never
        # appear, signal it with True instead of raising.
        return True


def extract_values(t, p):
    """Wait up to t seconds for the element at XPath p and return its text."""
    return WebDriverWait(driver, t).until(
        EC.presence_of_all_elements_located((By.XPATH, p))
    )[0].text
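

# Minimal usage sketch (not part of the original script). The image path is a
# hypothetical placeholder and the sensor readings are made-up values; adjust
# both before running. The driver.quit() call closes the shared browser
# session, which the module itself never does.
if __name__ == "__main__":
    crop, npk = recommend(25.0, 80.0, 6.5, 200.0)
    print("Recommended crop:", crop, "NPK values:", npk)

    result = get_disease(r"C:\path\to\leaf_image.jpg")  # hypothetical path
    print(result)

    driver.quit()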