diff --git a/.gitignore b/.gitignore
index 073a0c9..19dbc18 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,6 @@
 __pycache__
 
 **/.DS_Store
-notebooks/
\ No newline at end of file
+notebooks/
+
+.idea/
\ No newline at end of file
diff --git a/models/Perceptron.py b/models/Perceptron.py
new file mode 100644
index 0000000..d74af1c
--- /dev/null
+++ b/models/Perceptron.py
@@ -0,0 +1,34 @@
+import streamlit as st
+from sklearn.linear_model import Perceptron
+
+
+def lp_param_selector():
+    eta0 = st.slider("learning_rate", 0.001, 10.0, step=0.001, value=1.0)
+    max_iter = st.number_input("max_iter", 100, 2000, step=50, value=100)
+
+    penalty = st.selectbox("penalty", options=["None", "l2", "l1", "elasticnet"])
+
+    if penalty in ["l2", "l1", "elasticnet"]:
+        alpha = st.slider("alpha", 0.00001, 0.001, step=0.00001, value=0.0001)
+    else:
+        alpha = 0.0001
+
+    early_stopping = st.checkbox("early_stopping", value=False)
+
+    if early_stopping:
+        validation_fraction = st.number_input("validation_fraction", 0.0, 1.0, step=0.05, value=0.1)
+        n_iter_no_change = st.number_input("n_iter_no_change", 2, 100, step=1, value=5)
+    else:
+        validation_fraction = 0.1
+        n_iter_no_change = 5
+
+    params = {"eta0": eta0,
+              "max_iter": max_iter,
+              "penalty": None if penalty == "None" else penalty,  # sklearn expects None, not the string "None"
+              "alpha": alpha,
+              "early_stopping": early_stopping,
+              "validation_fraction": validation_fraction,
+              "n_iter_no_change": n_iter_no_change}
+
+    model = Perceptron(**params)
+    return model
diff --git a/models/utils.py b/models/utils.py
index 85316bc..9b6446b 100644
--- a/models/utils.py
+++ b/models/utils.py
@@ -7,6 +7,7 @@
     "K Nearest Neighbors": "from sklearn.neighbors import KNeighborsClassifier",
     "Gaussian Naive Bayes": "from sklearn.naive_bayes import GaussianNB",
     "SVC": "from sklearn.svm import SVC",
+    "Perceptron": "from sklearn.linear_model import Perceptron",
 }
 
 
@@ -19,6 +20,7 @@
     "K Nearest Neighbors": "https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html",
     "Gaussian Naive Bayes": "https://scikit-learn.org/stable/modules/naive_bayes.html",
     "SVC": "https://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html",
+    "Perceptron": "https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.Perceptron.html",
 }
 
 
@@ -63,4 +65,10 @@
     - They provide different type of kernel functions
     - They require careful normalization
     """,
+    "Perceptron": """
+        - The Perceptron is the simplest form of an Artificial Neural Network
+        - Single-layer Perceptrons require linearly separable data, otherwise they do not converge
+        - Multilayer Perceptrons (MLP) are feedforward neural networks with two or more layers; they have greater
+        processing power and can handle non-linearly separable data
+    """,
 }
diff --git a/utils/ui.py b/utils/ui.py
index 42ee2f0..df27578 100644
--- a/utils/ui.py
+++ b/utils/ui.py
@@ -10,6 +10,7 @@
 from models.KNearesNeighbors import knn_param_selector
 from models.SVC import svc_param_selector
 from models.GradientBoosting import gb_param_selector
+from models.Perceptron import lp_param_selector
 from models.utils import model_imports
 from utils.functions import img_to_bytes
 
@@ -83,6 +84,7 @@ def model_selector():
             "K Nearest Neighbors",
             "Gaussian Naive Bayes",
             "SVC",
+            "Perceptron",
         ),
     )
 
@@ -110,6 +112,9 @@ def model_selector():
     elif model_type == "Gradient Boosting":
         model = gb_param_selector()
 
+    elif model_type == "Perceptron":
+        model = lp_param_selector()
+
     return model_type, model
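
As a sanity check outside the diff, here is a minimal sketch of how the parameters exposed by `lp_param_selector` map onto scikit-learn's `Perceptron`, fitted on a toy linearly separable dataset (per the model description, the single-layer perceptron converges in that case). The dataset and parameter values are illustrative assumptions, not part of the change; only the keys of the `params` dict mirror the diff.

```python
# Illustrative sketch: exercises the same Perceptron parameters that
# lp_param_selector exposes, on a toy linearly separable dataset.
from sklearn.datasets import make_blobs
from sklearn.linear_model import Perceptron
from sklearn.model_selection import train_test_split

# Two well-separated clusters -> linearly separable, so the perceptron converges.
X, y = make_blobs(n_samples=300, centers=2, cluster_std=1.0, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Same keys as the params dict assembled in lp_param_selector.
params = {
    "eta0": 1.0,                 # learning rate shown as the "learning_rate" slider
    "max_iter": 100,
    "penalty": None,             # or "l2" / "l1" / "elasticnet"
    "alpha": 0.0001,             # regularization strength, only used when a penalty is set
    "early_stopping": False,
    "validation_fraction": 0.1,
    "n_iter_no_change": 5,
}

model = Perceptron(**params)
model.fit(X_train, y_train)
print(f"test accuracy: {model.score(X_test, y_test):.3f}")
```

On data that is not linearly separable, the same call still runs but typically stops at `max_iter` without converging, which is why the UI caps the iteration count rather than relying on convergence alone.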