@@ -17,19 +17,27 @@ def collect_dataset():
     The dataset contains ADR vs Rating of a Player
     :return : dataset obtained from the link, as matrix
     """
-    response = requests.get(
-        "https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
-        "master/Week1/ADRvsRating.csv",
-        timeout=10,
-    )
-    lines = response.text.splitlines()
-    data = []
-    for item in lines:
-        item = item.split(",")
-        data.append(item)
-    data.pop(0)  # This is for removing the labels from the list
-    dataset = np.matrix(data)
-    return dataset
+    try:
+        response = requests.get(
+            "https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
+            "master/Week1/ADRvsRating.csv",
+            timeout=10,
+        )
+        response.raise_for_status()  # Check for HTTP errors
+        lines = response.text.splitlines()
+        data = []
+        for item in lines:
+            item = item.split(",")
+            data.append(item)
+        data.pop(0)  # This is for removing the labels from the list
+        dataset = np.matrix(data)
+        return dataset
+    except requests.exceptions.RequestException as e:
+        print(f"Error in fetching dataset: {e}")
+        return None
+    except Exception as e:
+        print(f"Unexpected error in processing dataset: {e}")
+        return None


 def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
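A minimal usage sketch of the new failure contract (the caller code below is illustrative, not part of the patch): collect_dataset() now returns None instead of raising when the download or parsing fails, so callers are expected to check for None before touching the matrix.

    dataset = collect_dataset()
    if dataset is None:
        raise SystemExit("Could not fetch ADRvsRating.csv")
    print(dataset.shape)  # numpy matrix of (rows, columns) from the CSV, header row removed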
@@ -42,13 +50,16 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
     ;param return : Updated Feature's, using
                     curr_features - alpha_ * gradient(w.r.t. feature)
     """
-    n = len_data
-
-    prod = np.dot(theta, data_x.transpose())
-    prod -= data_y.transpose()
-    sum_grad = np.dot(prod, data_x)
-    theta = theta - (alpha / n) * sum_grad
-    return theta
+    try:
+        n = len_data
+        prod = np.dot(theta, data_x.transpose())
+        prod -= data_y.transpose()
+        sum_grad = np.dot(prod, data_x)
+        theta = theta - (alpha / n) * sum_grad
+        return theta
+    except Exception as e:
+        print(f"Error during gradient descent: {e}")
+        return None


 def sum_of_square_error(data_x, data_y, len_data, theta):
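As a reading aid, the step the wrapped code performs is the standard batch gradient-descent update; writing X for data_x, y for data_y, \theta for the 1 x k weight row and n for len_data (notation mine, not from the patch), it is

    \theta \leftarrow \theta - \frac{\alpha}{n}\,\bigl(\theta X^{\top} - y^{\top}\bigr)X

which corresponds to prod = np.dot(theta, data_x.transpose()) - data_y.transpose() followed by sum_grad = np.dot(prod, data_x) in the hunk above.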
@@ -59,11 +70,15 @@ def sum_of_square_error(data_x, data_y, len_data, theta):
     :param theta : contains the feature vector
     :return : sum of square error computed from given feature's
     """
-    prod = np.dot(theta, data_x.transpose())
-    prod -= data_y.transpose()
-    sum_elem = np.sum(np.square(prod))
-    error = sum_elem / (2 * len_data)
-    return error
+    try:
+        prod = np.dot(theta, data_x.transpose())
+        prod -= data_y.transpose()
+        sum_elem = np.sum(np.square(prod))
+        error = sum_elem / (2 * len_data)
+        return error
+    except Exception as e:
+        print(f"Error in calculating sum of square error: {e}")
+        return None


 def run_linear_regression(data_x, data_y):
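Likewise, the value the wrapped code returns is the half mean squared error (same notation as above, mine rather than the patch's):

    J(\theta) = \frac{1}{2n} \sum_{i=1}^{n} \bigl(\theta x_i^{\top} - y_i\bigr)^2

whose gradient with respect to \theta is the (1 / n) * sum_grad term applied in run_steep_gradient_descent.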
@@ -72,20 +87,31 @@ def run_linear_regression(data_x, data_y):
     :param data_y : contains the output (result vector)
     :return : feature for line of best fit (Feature vector)
     """
-    iterations = 100000
-    alpha = 0.0001550
+    try:
+        iterations = 100000
+        alpha = 0.0001550

-    no_features = data_x.shape[1]
-    len_data = data_x.shape[0] - 1
+        no_features = data_x.shape[1]
+        len_data = data_x.shape[0]

-    theta = np.zeros((1, no_features))
+        theta = np.zeros((1, no_features))

-    for i in range(iterations):
-        theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
-        error = sum_of_square_error(data_x, data_y, len_data, theta)
-        print(f"At Iteration {i + 1} - Error is {error:.5f}")
+        for i in range(iterations):
+            theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
+            if theta is None:  # If gradient descent fails, exit
+                print("Gradient descent failed. Exiting.")
+                return None
+            error = sum_of_square_error(data_x, data_y, len_data, theta)
+            if error is None:  # If error calculation fails, exit
+                print("Error calculation failed. Exiting.")
+                return None
+            if i % 1000 == 0:  # Print every 1000 iterations
+                print(f"At Iteration {i + 1} - Error is {error:.5f}")

-    return theta
+        return theta
+    except Exception as e:
+        print(f"Error in linear regression: {e}")
+        return None


 def mean_absolute_error(predicted_y, original_y):
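A quick, self-contained way to exercise the updated run_linear_regression (the toy data and expected values below are illustrative and assume the functions in this file are in scope; they are not part of the patch):

    import numpy as np

    # Fit y = 2 + 3 * x on ten synthetic points, with a bias column prepended as main() does.
    x = np.arange(10, dtype=float)
    data_x = np.c_[np.ones_like(x), x]
    data_y = (2.0 + 3.0 * x).reshape(-1, 1)

    theta = run_linear_regression(data_x, data_y)  # prints progress every 1000 iterations
    if theta is not None:
        print(theta)  # should end up close to [[2. 3.]]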
@@ -94,23 +120,37 @@ def mean_absolute_error(predicted_y, original_y):
     :param original_y : contains values of expected outcome
     :return : mean absolute error computed from given feature's
     """
-    total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
-    return total / len(original_y)
+    try:
+        total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
+        return total / len(original_y)
+    except Exception as e:
+        print(f"Error in calculating mean absolute error: {e}")
+        return None


 def main():
     """Driver function"""
     data = collect_dataset()
-
-    len_data = data.shape[0]
-    data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float)
-    data_y = data[:, -1].astype(float)
-
-    theta = run_linear_regression(data_x, data_y)
-    len_result = theta.shape[1]
-    print("Resultant Feature vector : ")
-    for i in range(len_result):
-        print(f"{theta[0, i]:.5f}")
+    if data is None:
+        print("Failed to collect or process the dataset. Exiting.")
+        return
+
+    try:
+        len_data = data.shape[0]
+        data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float)
+        data_y = data[:, -1].astype(float)
+
+        theta = run_linear_regression(data_x, data_y)
+        if theta is None:
+            print("Linear regression failed. Exiting.")
+            return
+
+        len_result = theta.shape[1]
+        print("Resultant Feature vector:")
+        for i in range(len_result):
+            print(f"{theta[0, i]:.5f}")
+    except Exception as e:
+        print(f"Unexpected error in main: {e}")


 if __name__ == "__main__":
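For completeness, mean_absolute_error (wrapped above but otherwise unchanged) computes, with \hat{y}_i the predicted and y_i the observed values (notation mine):

    \mathrm{MAE} = \frac{1}{m} \sum_{i=1}^{m} \lvert y_i - \hat{y}_i \rvert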