@Eligijus112 · Created March 21, 2021
Speed comparison between CPU and GPU training in XGBoost
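The benchmark loop below references a DataFrame d (containing a 'Sales' target column) and a features list of predictor columns; both are defined earlier in the original project and are not shown in the gist. A minimal synthetic stand-in, with hypothetical column names chosen purely to make the snippet self-contained, could look like this:

import numpy as np
import pandas as pd

# Hypothetical stand-in data: the original gist uses a real sales dataset
# with a 'Sales' column; only the shape of the interface matters here
n = 500_000
d = pd.DataFrame({
    'store_type': np.random.choice(['a', 'b', 'c'], size=n),
    'promo': np.random.randint(0, 2, size=n),
    'customers': np.random.randint(0, 2000, size=n),
    'Sales': np.random.gamma(shape=2.0, scale=3000.0, size=n)
})

# Predictor columns fed into pd.get_dummies in the benchmark loop
features = ['store_type', 'promo', 'customers']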
import time

import pandas as pd
import xgboost as xgb

# Defining the hyperparameter dictionaries; the GPU variant differs
# only in its tree construction method
cpu_dict = {
    'objective': 'reg:squarederror'
}

gpu_dict = {
    'objective': 'reg:squarederror',
    'tree_method': 'gpu_hist'
}

# Numbers of rows to train on
nrows = [
    10,
    100,
    200,
    3000,
    5000,
    10000,
    20000,
    50000,
    100000,
    300000
]

cpu_speeds = []
gpu_speeds = []

for nrow in nrows:
    # Sampling random rows; d and features are assumed to be defined earlier
    sample = d.sample(nrow)

    # Creating X and Y
    X = pd.get_dummies(sample[features])
    Y = sample['Sales']

    # Initiating the model objects
    cpu = xgb.XGBRegressor(**cpu_dict)
    gpu = xgb.XGBRegressor(**gpu_dict)

    # Training on the CPU
    start = time.time()
    cpu.fit(X, Y)
    cpu_speed = time.time() - start

    # Training on the GPU
    start = time.time()
    gpu.fit(X, Y)
    gpu_speed = time.time() - start

    # Appending the timing results
    cpu_speeds.append(cpu_speed)
    gpu_speeds.append(gpu_speed)

# Collecting the timings into a dataframe
speeds = pd.DataFrame({
    'nrows': nrows,
    'cpu_speeds': cpu_speeds,
    'gpu_speeds': gpu_speeds
})

# A positive difference means the GPU was faster for that sample size
speeds['difference'] = speeds['cpu_speeds'] - speeds['gpu_speeds']
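To see at which sample size the GPU starts paying off, the resulting speeds frame can be plotted against the row counts; a quick sketch with matplotlib (not part of the original gist) might be:

import matplotlib.pyplot as plt

# Plot training time against sample size for both devices
plt.plot(speeds['nrows'], speeds['cpu_speeds'], marker='o', label='CPU')
plt.plot(speeds['nrows'], speeds['gpu_speeds'], marker='o', label='GPU (gpu_hist)')
plt.xscale('log')
plt.xlabel('Rows trained on')
plt.ylabel('Training time (s)')
plt.legend()
plt.show()

Note that for small samples the CPU is typically faster, since each GPU fit pays a fixed overhead for moving the data onto the device; the GPU usually only pulls ahead once the sample size grows large.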