Skip to content
Closed
Changes from 1 commit
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
31faa5f
added my code in and folder
IBR-41379 Oct 10, 2023
bb40ab3
Appropriated the name of the existing program according to the repo i…
IBR-41379 Oct 10, 2023
578b159
Rename Simple_Adaline.py to simple_adaline.py
IBR-41379 Oct 10, 2023
0154cd4
Changed the name of the folder according to the convention.
IBR-41379 Oct 10, 2023
c08165b
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 10, 2023
5f373c1
Update simple_adaline.py
IBR-41379 Oct 10, 2023
b3a7404
Rename Churn_Modelling.csv to churn_modelling.csv
IBR-41379 Oct 10, 2023
d5af854
Update churn_cal.py
IBR-41379 Oct 10, 2023
669cf9d
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 10, 2023
df219a0
Update simple_adaline.py
IBR-41379 Oct 10, 2023
7dd3fb5
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 10, 2023
537d850
Update churn_cal.py
IBR-41379 Oct 10, 2023
38a8a0b
Update simple_adaline.py
IBR-41379 Oct 10, 2023
e0f8aa1
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 10, 2023
52b868f
Made appropriate changes to churn_cal
IBR-41379 Oct 11, 2023
6239124
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 11, 2023
e60546a
Made some changes to pass the test
IBR-41379 Oct 11, 2023
86af749
Made changes to pass the test
IBR-41379 Oct 11, 2023
ceed900
Made changes to the file to pass the ruff test
IBR-41379 Oct 11, 2023
4156be4
Modified file according to requirement
IBR-41379 Oct 11, 2023
03e5235
Update machine_learning/decision_tree_churn/churn_cal.py
IBR-41379 Oct 11, 2023
87e3e8b
Update machine_learning/decision_tree_churn/churn_cal.py
IBR-41379 Oct 11, 2023
632be92
Apply suggestions from code review
cclauss Oct 11, 2023
b62c9b4
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 11, 2023
9800d56
Update churn_cal.py
cclauss Oct 11, 2023
94e542d
appropriate changes made to both the files
IBR-41379 Oct 11, 2023
dd0ae6b
Made appropriate changes to churn_cal
IBR-41379 Oct 11, 2023
811fdf2
Made some more appropriate changes
IBR-41379 Oct 11, 2023
266edfe
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 11, 2023
38f492c
made changes for the tests
IBR-41379 Oct 11, 2023
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update simple_adaline.py
  • Loading branch information
IBR-41379 authored Oct 10, 2023
commit 5f373c1dc76e58de050c12b8daec3278f09ebf23
143 changes: 86 additions & 57 deletions neural_network/simple_adaline.py
Original file line number Diff line number Diff line change
@@ -1,81 +1,110 @@
from typing import List


def weight_change_or(
    weight: List[float], threshold: float, learning_rate: float
) -> List[float]:
    """
    Train a two-input Adaline so that it realises the OR gate.

    The four input patterns (0,0), (0,1), (1,0), (1,1) are checked in
    turn; whenever a pattern is misclassified, the weights (or a local
    copy of the threshold) are nudged by ``learning_rate`` and the
    checks restart from the beginning.

    Args:
        weight: Two weights; mutated IN PLACE and also returned.
        threshold: Firing threshold (only a local copy is adjusted).
        learning_rate: Step size applied on each correction.

    Returns:
        The (mutated) ``weight`` list once all OR patterns are satisfied.

    Note:
        May loop forever if ``learning_rate <= 0`` — same as the
        original recursive formulation, which instead overflowed the
        recursion limit; this version is iterative so a long training
        run cannot raise RecursionError.
    """
    while True:
        # Pattern (0, 0): must NOT fire, i.e. stay at or below threshold.
        output = weight[0] * 0 + weight[1] * 0
        if output > threshold:
            threshold += learning_rate
            continue
        # Pattern (0, 1): must fire (reach the threshold).
        output_left = weight[0] * 0 + weight[1] * 1
        if output_left < threshold:
            weight[0] = weight[0] + learning_rate * 1 * 0
            weight[1] = weight[1] + learning_rate * 1 * 1
            continue
        # Pattern (1, 0): must fire.
        output_left_down = weight[0] * 1 + weight[1] * 0
        if output_left_down < threshold:
            weight[0] = weight[0] + learning_rate * 1 * 1
            weight[1] = weight[1] + learning_rate * 1 * 0
            continue
        # Pattern (1, 1): must fire.
        output_all = weight[0] * 1 + weight[1] * 1
        if output_all < threshold:
            weight[0] = weight[0] + learning_rate * 1 * 1
            weight[1] = weight[1] + learning_rate * 1 * 1
            continue
        return weight


def weight_change_and(
    weight: List[float], threshold: float, learning_rate: float
) -> List[float]:
    """
    Train a two-input Adaline so that it realises the AND gate.

    The four input patterns (0,0), (0,1), (1,0), (1,1) are checked in
    turn; on a misclassification the weights (or a local copy of the
    threshold) are corrected by ``learning_rate`` and the checks restart.

    Args:
        weight: Two weights; mutated IN PLACE and also returned.
        threshold: Firing threshold (only a local copy is adjusted).
        learning_rate: Step size applied on each correction.

    Returns:
        The (mutated) ``weight`` list once all AND patterns are satisfied.
    """
    while True:
        # Pattern (0, 0): must NOT fire.
        output = weight[0] * 0 + weight[1] * 0
        if output > threshold:
            threshold += learning_rate
            continue
        # Pattern (0, 1): must NOT fire for AND.
        output_left = weight[0] * 0 + weight[1] * 1
        if output_left > threshold:
            # BUGFIX: the original decremented weight[0] here, but the
            # active input of pattern (0, 1) is the second one, so the
            # correction must shrink weight[1]; with the swapped index
            # training never terminated for e.g. weight=[1.2, 0.6],
            # threshold=1 (infinite recursion in the old code).
            weight[0] = weight[0] - (learning_rate * 1 * 0)
            weight[1] = weight[1] - (learning_rate * 1 * 1)
            continue
        # Pattern (1, 0): must NOT fire for AND.
        output_left_down = weight[0] * 1 + weight[1] * 0
        if output_left_down > threshold:
            # BUGFIX: symmetric swap — pattern (1, 0) corrects weight[0].
            weight[0] = weight[0] - (learning_rate * 1 * 1)
            weight[1] = weight[1] - (learning_rate * 1 * 0)
            continue
        # Pattern (1, 1): must fire.
        output_all = weight[0] * 1 + weight[1] * 1
        if output_all >= threshold:
            return weight
        weight[0] = weight[0] + (learning_rate * 1 * 1)
        weight[1] = weight[1] + (learning_rate * 1 * 1)


def and_gate(weight: List[float], input_a: int, input_b: int, threshold: float, learning_rate: float) -> int:
    """
    Evaluate the AND gate with an Adaline trained on the fly.

    Args:
        weight (List[float]): The weights for the Adaline algorithm.
        input_a (int): The first input value.
        input_b (int): The second input value.
        threshold (float): The threshold value for the Adaline algorithm.
        learning_rate (float): The learning rate for the Adaline algorithm.

    Returns:
        int: 1 if the weighted sum reaches the threshold, otherwise 0.
    """
    trained = weight_change_and(weight, threshold, learning_rate)
    activation = trained[0] * input_a + trained[1] * input_b
    return 1 if activation >= threshold else 0


def or_gate(weight: List[float], input_a: int, input_b: int, threshold: float, learning_rate: float) -> int:
    """
    Evaluate the OR gate with an Adaline trained on the fly.

    Args:
        weight (List[float]): The weights for the Adaline algorithm.
        input_a (int): The first input value.
        input_b (int): The second input value.
        threshold (float): The threshold value for the Adaline algorithm.
        learning_rate (float): The learning rate for the Adaline algorithm.

    Returns:
        int: 1 if the weighted sum reaches the threshold, otherwise 0.
    """
    trained = weight_change_or(weight, threshold, learning_rate)
    activation = trained[0] * input_a + trained[1] * input_b
    return 1 if activation >= threshold else 0


def main() -> None:
    """Read two gate inputs from stdin and print the OR and AND outputs."""
    # Separate weight lists: each gate's training mutates its list in place.
    weight = [1.2, 0.6]
    weight2 = [1.2, 0.6]
    threshold = 1
    learning_rate = 0.5
    input_a, input_b = input("Input the value of A and B:").split()
    input_a = int(input_a)
    input_b = int(input_b)
    print("\nThe output of OR is:", or_gate(weight, input_a, input_b, threshold, learning_rate))
    print("\nThe output of AND is:", and_gate(weight2, input_a, input_b, threshold, learning_rate))


if __name__ == "__main__":
    # Guard the script entry point so importing this module for its gate
    # functions no longer blocks on input() as the original top-level code did.
    main()