Skip to content
This repository was archived by the owner on Apr 28, 2023. It is now read-only.

Commit a90403a

Browse files
author
Jules Pondard
committed
Implement a gradient boosting execution time predictor
Generates a set of random options and tries to predict the execution time.
1 parent 29c8563 commit a90403a

File tree

1 file changed

+62
-0
lines changed

1 file changed

+62
-0
lines changed
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
import time
2+
import torch
3+
import tensor_comprehensions as tc
4+
#import sklearn
5+
#from sklearn.linear_model import LinearRegression
6+
#from sklearn.ensemble import GradientBoostingRegressor
7+
import numpy as np
8+
#from sklearn.model_selection import train_test_split
9+
#from tensor_comprehensions.mapping_options import Options
10+
from multiprocessing import Pool
11+
from itertools import repeat
12+
import my_utils
13+
#from tqdm import tqdm
14+
15+
# Build the benchmark kernel: fetch a convolution example (TC source code,
# kernel name, input tensors, and the encoded input-size vector) from the
# project helper module, then register it for timing.
(tc_code, tc_name, inp, init_input_sz) = my_utils.get_convolution_example(size_type="input", inp_sz_list=[8,2,28,28,8,1,1])

# NOTE(review): presumably precomputes the per-option category sizes
# (my_utils.cat_sz, read by getRandom below) for these inputs — confirm.
my_utils.computeCat(inp)
my_utils.set_tc(tc_code, tc_name)
# NB_HYPERPARAMS: number of tunable mapping options per candidate;
# INIT_INPUT_SZ: length of the input-size descriptor.
NB_HYPERPARAMS, INIT_INPUT_SZ = my_utils.NB_HYPERPARAMS, my_utils.INIT_INPUT_SZ
20+
21+
def createY(x):
    """Return the measured execution time for the option vector *x*."""
    return my_utils.evalTime(x)
24+
25+
def getRandom():
    """Draw a random option vector: one uniform category per hyperparameter."""
    draws = [np.random.randint(my_utils.cat_sz[j]) for j in range(NB_HYPERPARAMS)]
    return np.array(draws, dtype=int)
30+
31+
def makeDataset():
    """Sample 500 random option vectors and time each one.

    Returns a pair (X, Y): X stacks the option vectors row-wise,
    Y holds the corresponding measured execution times.
    """
    from tqdm import tqdm

    n_samples = 500
    xs, ys = [], []
    for _ in tqdm(range(n_samples)):
        candidate = getRandom()
        xs.append(candidate)
        ys.append(createY(candidate))
    return np.array(xs), np.array(ys)
43+
44+
def learn():
    """Fit a gradient-boosting regressor mapping options -> execution time.

    Prints the fastest measured time, then the squared Pearson correlation
    between predictions and ground truth on the train and test splits.
    """
    from sklearn.ensemble import GradientBoostingRegressor
    from sklearn.model_selection import train_test_split

    X, Y = makeDataset()
    print(min(Y))

    X_train, X_test, Y_train, Y_test = train_test_split(
        X, Y, test_size=0.2, random_state=42)

    model = GradientBoostingRegressor(n_estimators=1000)
    model.fit(X_train, Y_train)

    # Report squared correlation (train first, then test), matching the
    # original output order.
    for features, target in ((X_train, Y_train), (X_test, Y_test)):
        predictions = model.predict(features)
        print(np.corrcoef(predictions, target)[0, 1] ** 2)
61+
62+
#learn()

0 commit comments

Comments (0)