def collect_dataset():
    """Collect the CSGO dataset (ADR vs Rating of a Player).

    The CSV is fetched over HTTP from the The_Math_of_Intelligence repo.

    :return : dataset obtained from the link, as matrix, or ``None`` when
              fetching or parsing fails
    """
    try:
        resp = requests.get(
            "https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
            "master/Week1/ADRvsRating.csv",
            timeout=10,
        )
        resp.raise_for_status()  # Check for HTTP errors
        # One row per CSV line, split on commas.
        rows = [line.split(",") for line in resp.text.splitlines()]
        rows.pop(0)  # This is for removing the labels from the list
        return np.matrix(rows)
    except requests.exceptions.RequestException as e:
        print(f"Error in fetching dataset: {e}")
        return None
    except Exception as e:
        print(f"Unexpected error in processing dataset: {e}")
        return None
def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
    """Perform one update step of gradient descent.

    :param data_x   : contains the dataset
    :param data_y   : contains the output associated with each data-entry
    :param len_data : length of the data
    :param alpha    : learning rate of the model
    :param theta    : feature vector (weights of our model)
    :return : updated features, computed as
              curr_features - alpha * gradient(w.r.t. feature),
              or ``None`` if the numpy computation raises
    """
    try:
        # Residual between the model's predictions and the targets.
        residual = np.dot(theta, data_x.transpose()) - data_y.transpose()
        # Gradient of the squared-error cost w.r.t. theta.
        gradient = np.dot(residual, data_x)
        return theta - (alpha / len_data) * gradient
    except Exception as e:
        print(f"Error during gradient descent: {e}")
        return None
def sum_of_square_error(data_x, data_y, len_data, theta):
    """Return the sum-of-square error for the given feature vector.

    :param data_x   : contains our dataset
    :param data_y   : contains the output (result vector)
    :param len_data : length of the dataset
    :param theta    : contains the feature vector
    :return : sum of square error computed from given features, scaled by
              1 / (2 * len_data); ``None`` if the computation raises
    """
    try:
        # Prediction residuals for every sample.
        residual = np.dot(theta, data_x.transpose()) - data_y.transpose()
        return np.sum(np.square(residual)) / (2 * len_data)
    except Exception as e:
        print(f"Error in calculating sum of square error: {e}")
        return None
def run_linear_regression(data_x, data_y):
    """Implement linear regression over the dataset via gradient descent.

    :param data_x : contains our dataset
    :param data_y : contains the output (result vector)
    :return : feature vector for the line of best fit, or ``None`` when
              gradient descent or the error computation fails
    """
    try:
        iterations = 100000
        alpha = 0.0001550

        no_features = data_x.shape[1]
        len_data = data_x.shape[0]

        theta = np.zeros((1, no_features))

        for i in range(iterations):
            theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
            if theta is None:  # If gradient descent fails, exit
                print("Gradient descent failed. Exiting.")
                return None
            if i % 1000 == 0:  # Print every 1000 iterations
                # Only compute the diagnostic error when it is actually
                # printed -- previously it was computed on every iteration
                # and discarded 999 times out of 1000.
                error = sum_of_square_error(data_x, data_y, len_data, theta)
                if error is None:  # If error calculation fails, exit
                    print("Error calculation failed. Exiting.")
                    return None
                print(f"At Iteration {i + 1} - Error is {error:.5f}")

        return theta
    except Exception as e:
        print(f"Error in linear regression: {e}")
        return None
def mean_absolute_error(predicted_y, original_y):
    """Return the mean absolute error between predictions and targets.

    :param predicted_y : contains the output of prediction (result vector)
    :param original_y  : contains values of expected outcome
    :return : mean absolute error computed from given features, or ``None``
              if the computation raises (e.g. mismatched/empty inputs)
    """
    try:
        count = len(original_y)
        deviations = (
            abs(actual - predicted_y[idx]) for idx, actual in enumerate(original_y)
        )
        return sum(deviations) / count
    except Exception as e:
        print(f"Error in calculating mean absolute error: {e}")
        return None
def main():
    """Driver function"""
    data = collect_dataset()
    if data is None:
        print("Failed to collect or process the dataset. Exiting.")
        return

    try:
        num_rows = data.shape[0]
        # Prepend a bias column of ones; all columns but the last are inputs.
        data_x = np.c_[np.ones(num_rows), data[:, :-1]].astype(float)
        # The last column is the target value.
        data_y = data[:, -1].astype(float)

        theta = run_linear_regression(data_x, data_y)
        if theta is None:
            print("Linear regression failed. Exiting.")
            return

        print("Resultant Feature vector:")
        for idx in range(theta.shape[1]):
            print(f"{theta[0, idx]:.5f}")
    except Exception as e:
        print(f"Unexpected error in main: {e}")
if __name__ == "__main__" :
0 commit comments