# Bike-sharing demand analysis: load the Kaggle bike-sharing train/test data.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
bike_train_data= pd.read_csv('Data/train.csv')  # hourly rentals incl. targets: casual, registered, count
bike_test_data= pd.read_csv('Data/test.csv')    # same features, targets absent (9 vs 12 columns)
print('shape of training data',bike_train_data.shape)
print('shape of testing data',bike_test_data.shape)
shape of training data (10886, 12) shape of testing data (6493, 9)
# Preview the first five training rows.
bike_train_data.head()
datetime | season | holiday | workingday | weather | temp | atemp | humidity | windspeed | casual | registered | count | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 2011-01-01 00:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 81 | 0.0 | 3 | 13 | 16 |
1 | 2011-01-01 01:00:00 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 8 | 32 | 40 |
2 | 2011-01-01 02:00:00 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 5 | 27 | 32 |
3 | 2011-01-01 03:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 3 | 10 | 13 |
4 | 2011-01-01 04:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 0 | 1 | 1 |
# Preview the first five test rows (no casual/registered/count columns).
bike_test_data.head()
datetime | season | holiday | workingday | weather | temp | atemp | humidity | windspeed | |
---|---|---|---|---|---|---|---|---|---|
0 | 2011-01-20 00:00:00 | 1 | 0 | 1 | 1 | 10.66 | 11.365 | 56 | 26.0027 |
1 | 2011-01-20 01:00:00 | 1 | 0 | 1 | 1 | 10.66 | 13.635 | 56 | 0.0000 |
2 | 2011-01-20 02:00:00 | 1 | 0 | 1 | 1 | 10.66 | 13.635 | 56 | 0.0000 |
3 | 2011-01-20 03:00:00 | 1 | 0 | 1 | 1 | 10.66 | 12.880 | 56 | 11.0014 |
4 | 2011-01-20 04:00:00 | 1 | 0 | 1 | 1 | 10.66 | 12.880 | 56 | 11.0014 |
# List the training-set column names.
bike_train_data.columns
Index(['datetime', 'season', 'holiday', 'workingday', 'weather', 'temp', 'atemp', 'humidity', 'windspeed', 'casual', 'registered', 'count'], dtype='object')
# Dtypes and non-null counts for the training set.
bike_train_data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 10886 entries, 0 to 10885 Data columns (total 12 columns): datetime 10886 non-null object season 10886 non-null int64 holiday 10886 non-null int64 workingday 10886 non-null int64 weather 10886 non-null int64 temp 10886 non-null float64 atemp 10886 non-null float64 humidity 10886 non-null int64 windspeed 10886 non-null float64 casual 10886 non-null int64 registered 10886 non-null int64 count 10886 non-null int64 dtypes: float64(3), int64(8), object(1) memory usage: 1020.7+ KB
# Dtypes and non-null counts for the test set.
bike_test_data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 6493 entries, 0 to 6492 Data columns (total 9 columns): datetime 6493 non-null object season 6493 non-null int64 holiday 6493 non-null int64 workingday 6493 non-null int64 weather 6493 non-null int64 temp 6493 non-null float64 atemp 6493 non-null float64 humidity 6493 non-null int64 windspeed 6493 non-null float64 dtypes: float64(3), int64(5), object(1) memory usage: 456.7+ KB
# Per-column missing-value counts for the training set (all zero per the output below).
bike_train_data.isnull().sum()
datetime 0 season 0 holiday 0 workingday 0 weather 0 temp 0 atemp 0 humidity 0 windspeed 0 casual 0 registered 0 count 0 dtype: int64
# Per-column missing-value counts for the test set (all zero per the output below).
bike_test_data.isnull().sum()
datetime 0 season 0 holiday 0 workingday 0 weather 0 temp 0 atemp 0 humidity 0 windspeed 0 dtype: int64
# Work on a copy so the raw training frame stays untouched.
df=bike_train_data.copy()
df.head()
datetime | season | holiday | workingday | weather | temp | atemp | humidity | windspeed | casual | registered | count | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 2011-01-01 00:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 81 | 0.0 | 3 | 13 | 16 |
1 | 2011-01-01 01:00:00 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 8 | 32 | 40 |
2 | 2011-01-01 02:00:00 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 5 | 27 | 32 |
3 | 2011-01-01 03:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 3 | 10 | 13 |
4 | 2011-01-01 04:00:00 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 0 | 1 | 1 |
# Parse the timestamp, expand it into calendar features, then drop the raw column.
df['datetime'] = pd.to_datetime(df['datetime'], format='%Y-%m-%d %H:%M:%S')
for part in ('year', 'month', 'day', 'hour', 'dayofweek'):
    # Each .dt accessor attribute shares its name with the new column.
    df[part] = getattr(df['datetime'].dt, part)
df.drop(["datetime"], axis=1, inplace=True)
df
season | holiday | workingday | weather | temp | atemp | humidity | windspeed | casual | registered | count | year | month | day | hour | dayofweek | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 81 | 0.0000 | 3 | 13 | 16 | 2011 | 1 | 1 | 0 | 5 |
1 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0000 | 8 | 32 | 40 | 2011 | 1 | 1 | 1 | 5 |
2 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0000 | 5 | 27 | 32 | 2011 | 1 | 1 | 2 | 5 |
3 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0000 | 3 | 10 | 13 | 2011 | 1 | 1 | 3 | 5 |
4 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0000 | 0 | 1 | 1 | 2011 | 1 | 1 | 4 | 5 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
10881 | 4 | 0 | 1 | 1 | 15.58 | 19.695 | 50 | 26.0027 | 7 | 329 | 336 | 2012 | 12 | 19 | 19 | 2 |
10882 | 4 | 0 | 1 | 1 | 14.76 | 17.425 | 57 | 15.0013 | 10 | 231 | 241 | 2012 | 12 | 19 | 20 | 2 |
10883 | 4 | 0 | 1 | 1 | 13.94 | 15.910 | 61 | 15.0013 | 4 | 164 | 168 | 2012 | 12 | 19 | 21 | 2 |
10884 | 4 | 0 | 1 | 1 | 13.94 | 17.425 | 61 | 6.0032 | 12 | 117 | 129 | 2012 | 12 | 19 | 22 | 2 |
10885 | 4 | 0 | 1 | 1 | 13.12 | 16.665 | 66 | 8.9981 | 4 | 84 | 88 | 2012 | 12 | 19 | 23 | 2 |
10886 rows × 16 columns
# Summary statistics for all numeric columns.
df.describe()
season | holiday | workingday | weather | temp | atemp | humidity | windspeed | casual | registered | count | year | month | day | hour | dayofweek | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
count | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.00000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 | 10886.000000 |
mean | 2.506614 | 0.028569 | 0.680875 | 1.418427 | 20.23086 | 23.655084 | 61.886460 | 12.799395 | 36.021955 | 155.552177 | 191.574132 | 2011.501929 | 6.521495 | 9.992559 | 11.541613 | 3.013963 |
std | 1.116174 | 0.166599 | 0.466159 | 0.633839 | 7.79159 | 8.474601 | 19.245033 | 8.164537 | 49.960477 | 151.039033 | 181.144454 | 0.500019 | 3.444373 | 5.476608 | 6.915838 | 2.004585 |
min | 1.000000 | 0.000000 | 0.000000 | 1.000000 | 0.82000 | 0.760000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 1.000000 | 2011.000000 | 1.000000 | 1.000000 | 0.000000 | 0.000000 |
25% | 2.000000 | 0.000000 | 0.000000 | 1.000000 | 13.94000 | 16.665000 | 47.000000 | 7.001500 | 4.000000 | 36.000000 | 42.000000 | 2011.000000 | 4.000000 | 5.000000 | 6.000000 | 1.000000 |
50% | 3.000000 | 0.000000 | 1.000000 | 1.000000 | 20.50000 | 24.240000 | 62.000000 | 12.998000 | 17.000000 | 118.000000 | 145.000000 | 2012.000000 | 7.000000 | 10.000000 | 12.000000 | 3.000000 |
75% | 4.000000 | 0.000000 | 1.000000 | 2.000000 | 26.24000 | 31.060000 | 77.000000 | 16.997900 | 49.000000 | 222.000000 | 284.000000 | 2012.000000 | 10.000000 | 15.000000 | 18.000000 | 5.000000 |
max | 4.000000 | 1.000000 | 1.000000 | 4.000000 | 41.00000 | 45.455000 | 100.000000 | 56.996900 | 367.000000 | 886.000000 | 977.000000 | 2012.000000 | 12.000000 | 19.000000 | 23.000000 | 6.000000 |
# Column list after the calendar features were added.
df.columns
Index(['season', 'holiday', 'workingday', 'weather', 'temp', 'atemp', 'humidity', 'windspeed', 'casual', 'registered', 'count', 'year', 'month', 'day', 'hour', 'dayofweek'], dtype='object')
# Render matplotlib figures inline in the notebook.
%matplotlib inline
import seaborn as sns
def gen_all_plots(xlist, ylist, df):
    """Draw a grid of sum-aggregated bar charts: one cell per (x, y) pair.

    Each cell plots df.groupby(x)[y].sum() as a seaborn bar chart titled
    "x vs y". The grid has len(xlist) rows and len(ylist) columns.

    Parameters:
        xlist: list of column names to group by (one grid row each).
        ylist: list of column names to aggregate (one grid column each).
        df:    DataFrame containing all columns in xlist and ylist.

    The original implementation duplicated the plotting code across four
    rows/cols special-case branches; normalising `axs` to a 2-D array
    removes all of them. The figure height now also scales with the number
    of rows, so multi-row grids are no longer squashed into a 5-inch strip.
    """
    rows, cols = len(xlist), len(ylist)
    fig, axs = plt.subplots(rows, cols, figsize=(20, 5 * rows))
    # plt.subplots returns a bare Axes, a 1-D array, or a 2-D array
    # depending on rows/cols; reshape to a uniform (rows, cols) grid.
    axs = np.array(axs).reshape(rows, cols)
    for i, x in enumerate(xlist):
        for j, y in enumerate(ylist):
            ax = axs[i, j]
            temp = df[[x, y]].groupby([x]).agg({y: 'sum'})
            sns.barplot(x=temp.index, y=y, data=temp, ax=ax)
            ax.set_title('{} vs {}'.format(x, y))
    plt.show()
# One grid of bar charts per feature group, each against all three targets.
ylist = ['casual', 'registered', 'count']
for xlist in (['season'], ['humidity'], ['temp', 'atemp'], ['windspeed'],
              ['year'], ['dayofweek'], ['month'], ['day'], ['hour']):
    gen_all_plots(xlist, ylist, df)
# Hourly total-count profile for each day of the week (shared y-axis).
fig, ax = plt.subplots(1,7, figsize=(50,7), sharey=True)
days=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday","Saturday", "Sunday"]
for i in range(7):
    # Subset the rows belonging to this weekday.
    day_data= df[df["dayofweek"] == i]
    # Plot the day's hourly distribution.
    sns.barplot(x = "hour", y = "count", data = day_data, ax = ax[i]).set_title("Booking Distribution by Hours on " + days[i])
# fig.show() raises a UserWarning under the inline (non-GUI) backend;
# plt.show() renders the current figure correctly in both contexts.
plt.show()
D:\Anaconda3\lib\site-packages\ipykernel_launcher.py:10: UserWarning: Matplotlib is currently using module://ipykernel.pylab.backend_inline, which is a non-GUI backend, so cannot show the figure. # Remove the CWD from sys.path while we load stuff.
# Hourly registered-user profile for each day of the week (shared y-axis).
fig, ax = plt.subplots(1,7, figsize=(50,7), sharey=True)
days=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday","Saturday", "Sunday"]
for i in range(7):
    # Subset the rows belonging to this weekday.
    day_data= df[df["dayofweek"] == i]
    # Plot the day's hourly distribution.
    sns.barplot(x = "hour", y = "registered", data = day_data, ax = ax[i]).set_title("Booking Distribution by Hours on " + days[i])
# fig.show() raises a UserWarning under the inline (non-GUI) backend;
# plt.show() renders the current figure correctly in both contexts.
plt.show()
D:\Anaconda3\lib\site-packages\ipykernel_launcher.py:10: UserWarning: Matplotlib is currently using module://ipykernel.pylab.backend_inline, which is a non-GUI backend, so cannot show the figure. # Remove the CWD from sys.path while we load stuff.
# Hourly casual-user profile for each day of the week (shared y-axis).
fig, ax = plt.subplots(1,7, figsize=(50,7), sharey=True)
days=["Monday", "Tuesday", "Wednesday", "Thursday", "Friday","Saturday", "Sunday"]
for i in range(7):
    # Subset the rows belonging to this weekday.
    day_data= df[df["dayofweek"] == i]
    # Plot the day's hourly distribution.
    sns.barplot(x = "hour", y = "casual", data = day_data, ax = ax[i]).set_title("Booking Distribution by Hours on " + days[i])
# fig.show() raises a UserWarning under the inline (non-GUI) backend;
# plt.show() renders the current figure correctly in both contexts.
plt.show()
D:\Anaconda3\lib\site-packages\ipykernel_launcher.py:10: UserWarning: Matplotlib is currently using module://ipykernel.pylab.backend_inline, which is a non-GUI backend, so cannot show the figure. # Remove the CWD from sys.path while we load stuff.
# Box plots of the low-range columns (season/weather are categorical codes).
df[['season','temp','workingday','weather']].boxplot()
<matplotlib.axes._subplots.AxesSubplot at 0x236d1b66048>
# Box plots of the continuous weather variables.
df[['atemp','humidity','windspeed']].boxplot()
<matplotlib.axes._subplots.AxesSubplot at 0x236d295d2c8>
# Box plots of the three target variables.
df[['casual','registered','count']].boxplot()
<matplotlib.axes._subplots.AxesSubplot at 0x236d1fb07c8>
# Distribution (histogram + KDE) and box plot for each target variable.
fig, ax = plt.subplots(3,2, figsize=(16,15))
#casual customers distribution plots
sns.distplot(df["casual"],ax=ax[0][0])  # NOTE: distplot is deprecated; histplot/displot are the successors
df.boxplot("casual", ax=ax[0][1])
#registered customers distribution plots
sns.distplot(df["registered"], ax=ax[1][0])
df.boxplot("registered", ax=ax[1][1])
#all customers distribution plots
sns.distplot(df["count"], ax=ax[2][0])
df.boxplot("count", ax=ax[2][1])
# fig.show() raises a UserWarning under the inline (non-GUI) backend;
# plt.show() renders the current figure correctly in both contexts.
plt.show()
C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) D:\Anaconda3\lib\site-packages\ipykernel_launcher.py:15: UserWarning: Matplotlib is currently using module://ipykernel.pylab.backend_inline, which is a non-GUI backend, so cannot show the figure. from ipykernel import kernelapp as app
# Log-transform the right-skewed variables; log(1 + x) keeps zero counts finite.
df['casual_log'] = np.log((1+ df['casual']))
df['registered_log'] = np.log((1+ df['registered']))
df['count_log'] = np.log((1+ df['count']))
# BUG FIX: the original computed np.log(1 + df['weather']) here, so
# 'windspeed_log' was actually a transform of the weather code — the head()
# output further down shows it stuck at log(2)=0.693147 even where
# windspeed is 0.0. Transform the windspeed column itself.
df['windspeed_log'] = np.log((1 + df['windspeed']))
# Distribution and box plot for each log-transformed target variable.
fig, ax = plt.subplots(3,2, figsize=(16,15))
#casual customers distribution plots
sns.distplot(df["casual_log"],ax=ax[0][0])  # NOTE: distplot is deprecated; histplot/displot are the successors
df.boxplot("casual_log", ax=ax[0][1])
#registered customers distribution plots
sns.distplot(df["registered_log"], ax=ax[1][0])
df.boxplot("registered_log", ax=ax[1][1])
#all customers distribution plots
sns.distplot(df["count_log"], ax=ax[2][0])
df.boxplot("count_log", ax=ax[2][1])
# fig.show() raises a UserWarning under the inline (non-GUI) backend;
# plt.show() renders the current figure correctly in both contexts.
plt.show()
C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) C:\Users\affine\AppData\Roaming\Python\Python37\site-packages\seaborn\distributions.py:2557: FutureWarning: `distplot` is a deprecated function and will be removed in a future version. Please adapt your code to use either `displot` (a figure-level function with similar flexibility) or `histplot` (an axes-level function for histograms). warnings.warn(msg, FutureWarning) D:\Anaconda3\lib\site-packages\ipykernel_launcher.py:15: UserWarning: Matplotlib is currently using module://ipykernel.pylab.backend_inline, which is a non-GUI backend, so cannot show the figure. from ipykernel import kernelapp as app
# Box plot of the log-transformed windspeed column.
df[['windspeed_log']].boxplot()
<matplotlib.axes._subplots.AxesSubplot at 0x236d1d58108>
# Correlations among season/temperature/humidity — temp vs atemp is ~0.985.
df[['season','temp','atemp','humidity']].corr()
season | temp | atemp | humidity | |
---|---|---|---|---|
season | 1.000000 | 0.258689 | 0.264744 | 0.190610 |
temp | 0.258689 | 1.000000 | 0.984948 | -0.064949 |
atemp | 0.264744 | 0.984948 | 1.000000 | -0.043536 |
humidity | 0.190610 | -0.064949 | -0.043536 | 1.000000 |
# Visual confirmation of the near-linear temp/atemp relationship.
df.plot(kind='scatter',x='temp',y='atemp')
<matplotlib.axes._subplots.AxesSubplot at 0x236d24ec2c8>
# Humidity vs felt temperature scatter.
df.plot(kind='scatter',x='humidity',y='atemp')
<matplotlib.axes._subplots.AxesSubplot at 0x236d1c832c8>
# Correlation matrix for the weather-related attributes below.
df[["season","temp", "weather", "windspeed", "humidity"]].corr()
season | temp | weather | windspeed | humidity | |
---|---|---|---|---|---|
season | 1.000000 | 0.258689 | 0.008879 | -0.147121 | 0.190610 |
temp | 0.258689 | 1.000000 | -0.055035 | -0.017852 | -0.064949 |
weather | 0.008879 | -0.055035 | 1.000000 | 0.007261 | 0.406244 |
windspeed | -0.147121 | -0.017852 | 0.007261 | 1.000000 | -0.318607 |
humidity | 0.190610 | -0.064949 | 0.406244 | -0.318607 | 1.000000 |
# Hourly count distribution on official holidays that fall on weekdays.
holiday_df = df[(df["holiday"]==1) & (df["dayofweek"]<5)]
sns.barplot(x = "hour", y = "count", data = holiday_df)
<matplotlib.axes._subplots.AxesSubplot at 0x236d1f7adc8>
# Hourly count distribution on non-working weekdays (i.e. holidays captured
# via workingday == 0) — to compare against the weekend pattern.
holiday_df = df[(df["workingday"]==0) & (df["dayofweek"]<5)]
sns.barplot(x = "hour", y = "count", data = holiday_df)
<matplotlib.axes._subplots.AxesSubplot at 0x236d1663988>
# Preview the frame after adding the log-transformed columns.
df.head()
season | holiday | workingday | weather | temp | atemp | humidity | windspeed | casual | registered | count | year | month | day | hour | dayofweek | casual_log | registered_log | count_log | windspeed_log | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 81 | 0.0 | 3 | 13 | 16 | 2011 | 1 | 1 | 0 | 5 | 1.386294 | 2.639057 | 2.833213 | 0.693147 |
1 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 8 | 32 | 40 | 2011 | 1 | 1 | 1 | 5 | 2.197225 | 3.496508 | 3.713572 | 0.693147 |
2 | 1 | 0 | 0 | 1 | 9.02 | 13.635 | 80 | 0.0 | 5 | 27 | 32 | 2011 | 1 | 1 | 2 | 5 | 1.791759 | 3.332205 | 3.496508 | 0.693147 |
3 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 3 | 10 | 13 | 2011 | 1 | 1 | 3 | 5 | 1.386294 | 2.397895 | 2.639057 | 0.693147 |
4 | 1 | 0 | 0 | 1 | 9.84 | 14.395 | 75 | 0.0 | 0 | 1 | 1 | 2011 | 1 | 1 | 4 | 5 | 0.000000 | 0.693147 | 0.693147 | 0.693147 |
The distribution of bookings is consistent throughout the month regardless of the day number, which leads us to believe that there is no need for the 'day' attribute.
The distribution across the week is consistent and roughly the same for all days except weekends, which is already reflected in the workingday attribute. This leads us to believe there is no need for the dayofweek attribute; instead we create a new attribute, weekend (1 = weekend, 0 = weekday).
The booking distribution on holidays is very similar to weekends, and the boolean workingday attribute already takes this into account (it is 1 when the day is neither a holiday nor a weekend). There is no need for the holiday attribute.
temp and atemp are highly correlated, so we do not require the atemp attribute.
# Weekend flag: 1 for Saturday/Sunday (dayofweek 5 or 6), otherwise 0.
df['weekend'] = (df['dayofweek'] >= 5).astype(int)
df.columns
Index(['season', 'holiday', 'workingday', 'weather', 'temp', 'atemp', 'humidity', 'windspeed', 'casual', 'registered', 'count', 'year', 'month', 'day', 'hour', 'dayofweek', 'casual_log', 'registered_log', 'count_log', 'windspeed_log', 'weekend'], dtype='object')
# Drop redundant columns: atemp (~= temp), dayofweek/holiday (captured by
# weekend/workingday), raw targets (log versions kept) and raw windspeed.
df.drop(columns=['atemp','dayofweek','holiday','count', 'registered','casual',"windspeed"],inplace=True)
df.head()
season | workingday | weather | temp | humidity | year | month | day | hour | casual_log | registered_log | count_log | windspeed_log | weekend | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1 | 0 | 1 | 9.84 | 81 | 2011 | 1 | 1 | 0 | 1.386294 | 2.639057 | 2.833213 | 0.693147 | 1 |
1 | 1 | 0 | 1 | 9.02 | 80 | 2011 | 1 | 1 | 1 | 2.197225 | 3.496508 | 3.713572 | 0.693147 | 1 |
2 | 1 | 0 | 1 | 9.02 | 80 | 2011 | 1 | 1 | 2 | 1.791759 | 3.332205 | 3.496508 | 0.693147 | 1 |
3 | 1 | 0 | 1 | 9.84 | 75 | 2011 | 1 | 1 | 3 | 1.386294 | 2.397895 | 2.639057 | 0.693147 | 1 |
4 | 1 | 0 | 1 | 9.84 | 75 | 2011 | 1 | 1 | 4 | 0.000000 | 0.693147 | 0.693147 | 0.693147 | 1 |
def one_hot_encode_cagtegorical_col(data, categorical_features):
    """One-hot encode the given columns, dropping each column's first level.

    Parameters:
        data: DataFrame containing (at least) the columns to encode.
        categorical_features: list of column names to encode.

    Returns:
        DataFrame of dummy columns named '<col>_<level>'; the first level of
        each column is dropped to avoid the dummy-variable trap.

    Note: pd.get_dummies accepts the whole column list at once, so the
    original one-column-at-a-time loop (with a redundant explicit prefix
    equal to the default) was unnecessary.
    """
    return pd.get_dummies(data[categorical_features],
                          columns=categorical_features, drop_first=True)
# One-hot encode the remaining multi-level categorical columns.
categorical_columns=[ "season", "weather"]
df_cat=one_hot_encode_cagtegorical_col(df,categorical_columns)
df_cat.columns
Index(['season_2', 'season_3', 'season_4', 'weather_2', 'weather_3', 'weather_4'], dtype='object')
# Preview the dummy columns.
df_cat.head()
season_2 | season_3 | season_4 | weather_2 | weather_3 | weather_4 | |
---|---|---|---|---|---|---|
0 | 0 | 0 | 0 | 0 | 0 | 0 |
1 | 0 | 0 | 0 | 0 | 0 | 0 |
2 | 0 | 0 | 0 | 0 | 0 | 0 |
3 | 0 | 0 | 0 | 0 | 0 | 0 |
4 | 0 | 0 | 0 | 0 | 0 | 0 |
# Every column not one-hot encoded is kept as-is (setdiff1d also sorts names).
numerical_col_list =np.setdiff1d(df.columns.to_list(), categorical_columns).tolist()
df[numerical_col_list]
casual_log | count_log | day | hour | humidity | month | registered_log | temp | weekend | windspeed_log | workingday | year | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1.386294 | 2.833213 | 1 | 0 | 81 | 1 | 2.639057 | 9.84 | 1 | 0.693147 | 0 | 2011 |
1 | 2.197225 | 3.713572 | 1 | 1 | 80 | 1 | 3.496508 | 9.02 | 1 | 0.693147 | 0 | 2011 |
2 | 1.791759 | 3.496508 | 1 | 2 | 80 | 1 | 3.332205 | 9.02 | 1 | 0.693147 | 0 | 2011 |
3 | 1.386294 | 2.639057 | 1 | 3 | 75 | 1 | 2.397895 | 9.84 | 1 | 0.693147 | 0 | 2011 |
4 | 0.000000 | 0.693147 | 1 | 4 | 75 | 1 | 0.693147 | 9.84 | 1 | 0.693147 | 0 | 2011 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
10881 | 2.079442 | 5.820083 | 19 | 19 | 50 | 12 | 5.799093 | 15.58 | 0 | 0.693147 | 1 | 2012 |
10882 | 2.397895 | 5.488938 | 19 | 20 | 57 | 12 | 5.446737 | 14.76 | 0 | 0.693147 | 1 | 2012 |
10883 | 1.609438 | 5.129899 | 19 | 21 | 61 | 12 | 5.105945 | 13.94 | 0 | 0.693147 | 1 | 2012 |
10884 | 2.564949 | 4.867534 | 19 | 22 | 61 | 12 | 4.770685 | 13.94 | 0 | 0.693147 | 1 | 2012 |
10885 | 1.609438 | 4.488636 | 19 | 23 | 66 | 12 | 4.442651 | 13.12 | 0 | 0.693147 | 1 | 2012 |
10886 rows × 12 columns
# Recombine the numeric columns with the dummy columns into the model frame.
df=pd.concat([df[numerical_col_list],df_cat],axis=1)
df.head()
casual_log | count_log | day | hour | humidity | month | registered_log | temp | weekend | windspeed_log | workingday | year | season_2 | season_3 | season_4 | weather_2 | weather_3 | weather_4 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 1.386294 | 2.833213 | 1 | 0 | 81 | 1 | 2.639057 | 9.84 | 1 | 0.693147 | 0 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
1 | 2.197225 | 3.713572 | 1 | 1 | 80 | 1 | 3.496508 | 9.02 | 1 | 0.693147 | 0 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
2 | 1.791759 | 3.496508 | 1 | 2 | 80 | 1 | 3.332205 | 9.02 | 1 | 0.693147 | 0 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
3 | 1.386294 | 2.639057 | 1 | 3 | 75 | 1 | 2.397895 | 9.84 | 1 | 0.693147 | 0 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
4 | 0.000000 | 0.693147 | 1 | 4 | 75 | 1 | 0.693147 | 9.84 | 1 | 0.693147 | 0 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
# Final feature-frame dimensions.
df.shape
(10886, 18)
# Time-based split: days 1-14 of each month train, days 15-19 validate.
# All three log targets are removed from the feature matrices; count_log is
# the label being predicted.
train_mask = df['day'] < 15
df_train = df[train_mask]
df_test = df[~train_mask]
target_cols = ['casual_log', 'count_log', 'registered_log']
x_train = df_train.drop(columns=target_cols)
y_train = df_train['count_log']
x_test = df_test.drop(columns=target_cols)
y_test = df_test['count_log']
from sklearn.linear_model import LinearRegression
from xgboost import XGBRegressor
from sklearn.ensemble import RandomForestRegressor
from catboost import CatBoostRegressor
from lightgbm import LGBMRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import r2_score
from datetime import datetime
# Candidate regressors to compare, all instantiated with library defaults.
selected_models={
    'linear regression':{
        'model':LinearRegression()
    },
    'random forest':{
        'model':RandomForestRegressor()
    },
    'xgboost':{
        'model':XGBRegressor()
    },
    'catboost':{
        'model':CatBoostRegressor()
    },
    'light gradient boost':{
        'model':LGBMRegressor()
    }
}
# Fit every candidate model and record its train/validation R^2 and fit time.
results=[]
for model_nme, model_dict in selected_models.items():
    model = model_dict['model']
    start_time = datetime.now()
    model.fit(x_train, y_train)
    fit_time = datetime.now() - start_time
    # Predict once and reuse — the original called model.predict(x_test)
    # twice per model (once for the score, once for 'pred').
    y_pred = model.predict(x_test)
    results.append({
        'model name': model_nme,
        'model': model,
        'train score': model.score(x_train, y_train),  # R^2 on training data
        'r2 score': r2_score(y_test, y_pred),          # R^2 on hold-out days
        'time(s)': fit_time.total_seconds(),           # equals s + us/1e6 for sub-day runs
        'pred': y_pred
    })
D:\Anaconda3\lib\site-packages\xgboost\core.py:587: FutureWarning: Series.base is deprecated and will be removed in a future version if getattr(data, 'base', None) is not None and \ D:\Anaconda3\lib\site-packages\xgboost\core.py:588: FutureWarning: Series.base is deprecated and will be removed in a future version data.base is not None and isinstance(data, np.ndarray) \
[10:21:28] WARNING: C:/Jenkins/workspace/xgboost-win64_release_0.90/src/objective/regression_obj.cu:152: reg:linear is now deprecated in favor of reg:squarederror. Learning rate set to 0.056546 0: learn: 1.3749737 total: 3.64ms remaining: 3.64s 1: learn: 1.3202685 total: 8.04ms remaining: 4.01s 2: learn: 1.2725750 total: 11.8ms remaining: 3.94s 3: learn: 1.2251351 total: 15.4ms remaining: 3.82s 4: learn: 1.1798559 total: 19.2ms remaining: 3.83s 5: learn: 1.1391239 total: 23.7ms remaining: 3.92s 6: learn: 1.0990805 total: 27.8ms remaining: 3.94s 7: learn: 1.0629819 total: 31.6ms remaining: 3.92s 8: learn: 1.0288472 total: 35.1ms remaining: 3.87s 9: learn: 0.9941183 total: 40ms remaining: 3.96s 10: learn: 0.9600053 total: 43.5ms remaining: 3.91s 11: learn: 0.9282114 total: 47.3ms remaining: 3.89s 12: learn: 0.9021440 total: 50.9ms remaining: 3.87s 13: learn: 0.8724114 total: 55.7ms remaining: 3.92s 14: learn: 0.8450165 total: 59.2ms remaining: 3.89s 15: learn: 0.8241443 total: 63ms remaining: 3.87s 16: learn: 0.8001363 total: 66.5ms remaining: 3.85s 17: learn: 0.7824664 total: 71.2ms remaining: 3.89s 18: learn: 0.7630446 total: 74.8ms remaining: 3.86s 19: learn: 0.7466682 total: 78.8ms remaining: 3.86s 20: learn: 0.7295079 total: 82.4ms remaining: 3.84s 21: learn: 0.7154371 total: 87.1ms remaining: 3.87s 22: learn: 0.7019817 total: 90.3ms remaining: 3.84s 23: learn: 0.6859212 total: 93.7ms remaining: 3.81s 24: learn: 0.6711159 total: 96.9ms remaining: 3.78s 25: learn: 0.6534669 total: 101ms remaining: 3.79s 26: learn: 0.6417217 total: 104ms remaining: 3.76s 27: learn: 0.6315741 total: 108ms remaining: 3.74s 28: learn: 0.6201196 total: 111ms remaining: 3.73s 29: learn: 0.6093139 total: 115ms remaining: 3.73s 30: learn: 0.5946097 total: 119ms remaining: 3.72s 31: learn: 0.5793868 total: 122ms remaining: 3.7s 32: learn: 0.5707474 total: 126ms remaining: 3.69s 33: learn: 0.5629595 total: 129ms remaining: 3.67s 34: learn: 0.5532475 total: 133ms remaining: 3.66s 35: learn: 
0.5436513 total: 136ms remaining: 3.64s 36: learn: 0.5373027 total: 139ms remaining: 3.63s 37: learn: 0.5303667 total: 143ms remaining: 3.61s 38: learn: 0.5203498 total: 146ms remaining: 3.61s 39: learn: 0.5120675 total: 150ms remaining: 3.59s 40: learn: 0.5020627 total: 153ms remaining: 3.58s 41: learn: 0.4947946 total: 156ms remaining: 3.56s 42: learn: 0.4897466 total: 160ms remaining: 3.56s 43: learn: 0.4864284 total: 163ms remaining: 3.55s 44: learn: 0.4791921 total: 169ms remaining: 3.58s 45: learn: 0.4756151 total: 173ms remaining: 3.58s 46: learn: 0.4683154 total: 182ms remaining: 3.69s 47: learn: 0.4619010 total: 186ms remaining: 3.69s 48: learn: 0.4586851 total: 190ms remaining: 3.69s 49: learn: 0.4552328 total: 195ms remaining: 3.7s 50: learn: 0.4499257 total: 202ms remaining: 3.76s 51: learn: 0.4463554 total: 207ms remaining: 3.77s 52: learn: 0.4408057 total: 210ms remaining: 3.76s 53: learn: 0.4385256 total: 214ms remaining: 3.74s 54: learn: 0.4349549 total: 217ms remaining: 3.73s 55: learn: 0.4324756 total: 223ms remaining: 3.77s 56: learn: 0.4268653 total: 228ms remaining: 3.77s 57: learn: 0.4243264 total: 236ms remaining: 3.84s 58: learn: 0.4217081 total: 246ms remaining: 3.92s 59: learn: 0.4194522 total: 249ms remaining: 3.9s 60: learn: 0.4178058 total: 253ms remaining: 3.9s 61: learn: 0.4141479 total: 257ms remaining: 3.88s 62: learn: 0.4091318 total: 260ms remaining: 3.87s 63: learn: 0.4057448 total: 263ms remaining: 3.85s 64: learn: 0.4010367 total: 268ms remaining: 3.85s 65: learn: 0.3990128 total: 271ms remaining: 3.83s 66: learn: 0.3974862 total: 274ms remaining: 3.82s 67: learn: 0.3962964 total: 278ms remaining: 3.8s 68: learn: 0.3949049 total: 281ms remaining: 3.79s 69: learn: 0.3930489 total: 285ms remaining: 3.78s 70: learn: 0.3920066 total: 288ms remaining: 3.77s 71: learn: 0.3887221 total: 292ms remaining: 3.76s 72: learn: 0.3863921 total: 295ms remaining: 3.75s 73: learn: 0.3843220 total: 299ms remaining: 3.75s 74: learn: 0.3814990 
total: 303ms remaining: 3.73s 75: learn: 0.3779267 total: 306ms remaining: 3.72s 76: learn: 0.3752831 total: 309ms remaining: 3.71s 77: learn: 0.3726264 total: 314ms remaining: 3.71s 78: learn: 0.3717625 total: 317ms remaining: 3.69s 79: learn: 0.3710212 total: 320ms remaining: 3.68s 80: learn: 0.3701021 total: 324ms remaining: 3.67s 81: learn: 0.3671205 total: 327ms remaining: 3.66s 82: learn: 0.3648147 total: 331ms remaining: 3.65s 83: learn: 0.3634937 total: 334ms remaining: 3.64s 84: learn: 0.3628985 total: 338ms remaining: 3.64s 85: learn: 0.3606087 total: 342ms remaining: 3.64s 86: learn: 0.3588517 total: 351ms remaining: 3.69s 87: learn: 0.3571565 total: 355ms remaining: 3.68s 88: learn: 0.3563432 total: 365ms remaining: 3.73s 89: learn: 0.3557229 total: 368ms remaining: 3.72s 90: learn: 0.3550657 total: 373ms remaining: 3.73s 91: learn: 0.3529319 total: 376ms remaining: 3.71s 92: learn: 0.3523527 total: 379ms remaining: 3.7s 93: learn: 0.3515990 total: 383ms remaining: 3.69s 94: learn: 0.3508760 total: 386ms remaining: 3.68s 95: learn: 0.3501276 total: 389ms remaining: 3.67s 96: learn: 0.3484923 total: 393ms remaining: 3.65s 97: learn: 0.3476322 total: 396ms remaining: 3.65s 98: learn: 0.3471539 total: 399ms remaining: 3.63s 99: learn: 0.3465064 total: 403ms remaining: 3.62s 100: learn: 0.3457796 total: 406ms remaining: 3.61s 101: learn: 0.3446788 total: 409ms remaining: 3.6s 102: learn: 0.3442521 total: 413ms remaining: 3.59s 103: learn: 0.3426716 total: 416ms remaining: 3.58s 104: learn: 0.3420138 total: 419ms remaining: 3.57s 105: learn: 0.3408922 total: 422ms remaining: 3.56s 106: learn: 0.3403163 total: 426ms remaining: 3.55s 107: learn: 0.3390602 total: 429ms remaining: 3.54s 108: learn: 0.3386132 total: 433ms remaining: 3.54s 109: learn: 0.3377547 total: 436ms remaining: 3.53s 110: learn: 0.3364323 total: 439ms remaining: 3.52s 111: learn: 0.3362083 total: 443ms remaining: 3.51s 112: learn: 0.3356587 total: 446ms remaining: 3.5s 113: learn: 0.3341606 
total: 454ms remaining: 3.52s 114: learn: 0.3333439 total: 457ms remaining: 3.51s 115: learn: 0.3328052 total: 460ms remaining: 3.5s 116: learn: 0.3321835 total: 463ms remaining: 3.5s 117: learn: 0.3315779 total: 467ms remaining: 3.49s 118: learn: 0.3311458 total: 470ms remaining: 3.48s 119: learn: 0.3308133 total: 474ms remaining: 3.47s 120: learn: 0.3304633 total: 477ms remaining: 3.46s 121: learn: 0.3301282 total: 480ms remaining: 3.46s 122: learn: 0.3291940 total: 484ms remaining: 3.45s 123: learn: 0.3289359 total: 487ms remaining: 3.44s 124: learn: 0.3285937 total: 490ms remaining: 3.43s 125: learn: 0.3282522 total: 494ms remaining: 3.42s 126: learn: 0.3275590 total: 497ms remaining: 3.42s 127: learn: 0.3271271 total: 500ms remaining: 3.41s 128: learn: 0.3260715 total: 510ms remaining: 3.44s 129: learn: 0.3251012 total: 517ms remaining: 3.46s 130: learn: 0.3245562 total: 523ms remaining: 3.47s 131: learn: 0.3242791 total: 527ms remaining: 3.46s 132: learn: 0.3237560 total: 531ms remaining: 3.46s 133: learn: 0.3232120 total: 535ms remaining: 3.46s 134: learn: 0.3228253 total: 541ms remaining: 3.46s 135: learn: 0.3219950 total: 544ms remaining: 3.46s 136: learn: 0.3215759 total: 548ms remaining: 3.45s 137: learn: 0.3212710 total: 552ms remaining: 3.45s 138: learn: 0.3207484 total: 556ms remaining: 3.44s 139: learn: 0.3199692 total: 559ms remaining: 3.44s 140: learn: 0.3195843 total: 563ms remaining: 3.43s 141: learn: 0.3192520 total: 567ms remaining: 3.43s 142: learn: 0.3188607 total: 571ms remaining: 3.42s 143: learn: 0.3182372 total: 575ms remaining: 3.42s 144: learn: 0.3180452 total: 578ms remaining: 3.41s 145: learn: 0.3176896 total: 582ms remaining: 3.4s 146: learn: 0.3171194 total: 586ms remaining: 3.4s 147: learn: 0.3165146 total: 590ms remaining: 3.4s 148: learn: 0.3158994 total: 594ms remaining: 3.39s 149: learn: 0.3153629 total: 598ms remaining: 3.39s 150: learn: 0.3148912 total: 605ms remaining: 3.4s 151: learn: 0.3144249 total: 610ms remaining: 3.4s 
152: learn: 0.3138362 total: 614ms remaining: 3.4s 153: learn: 0.3134864 total: 617ms remaining: 3.39s 154: learn: 0.3131260 total: 621ms remaining: 3.38s 155: learn: 0.3129733 total: 624ms remaining: 3.37s 156: learn: 0.3127793 total: 628ms remaining: 3.37s 157: learn: 0.3123960 total: 631ms remaining: 3.36s 158: learn: 0.3122254 total: 634ms remaining: 3.35s 159: learn: 0.3118059 total: 637ms remaining: 3.35s 160: learn: 0.3113156 total: 641ms remaining: 3.34s 161: learn: 0.3110951 total: 645ms remaining: 3.33s 162: learn: 0.3106304 total: 648ms remaining: 3.33s 163: learn: 0.3104329 total: 651ms remaining: 3.32s 164: learn: 0.3098942 total: 654ms remaining: 3.31s 165: learn: 0.3095445 total: 658ms remaining: 3.31s 166: learn: 0.3091924 total: 661ms remaining: 3.3s 167: learn: 0.3089012 total: 665ms remaining: 3.29s 168: learn: 0.3086059 total: 668ms remaining: 3.28s 169: learn: 0.3082838 total: 671ms remaining: 3.28s 170: learn: 0.3080556 total: 676ms remaining: 3.27s 171: learn: 0.3077473 total: 683ms remaining: 3.29s 172: learn: 0.3073708 total: 687ms remaining: 3.29s 173: learn: 0.3070890 total: 691ms remaining: 3.28s 174: learn: 0.3066708 total: 695ms remaining: 3.27s 175: learn: 0.3063720 total: 699ms remaining: 3.27s 176: learn: 0.3061718 total: 704ms remaining: 3.27s 177: learn: 0.3058960 total: 708ms remaining: 3.27s 178: learn: 0.3054761 total: 711ms remaining: 3.26s 179: learn: 0.3052930 total: 715ms remaining: 3.25s 180: learn: 0.3050078 total: 719ms remaining: 3.25s 181: learn: 0.3048692 total: 722ms remaining: 3.25s 182: learn: 0.3044876 total: 726ms remaining: 3.24s 183: learn: 0.3041969 total: 729ms remaining: 3.23s 184: learn: 0.3037733 total: 733ms remaining: 3.23s 185: learn: 0.3035620 total: 736ms remaining: 3.22s 186: learn: 0.3032266 total: 740ms remaining: 3.21s 187: learn: 0.3029201 total: 743ms remaining: 3.21s 188: learn: 0.3026835 total: 747ms remaining: 3.2s 189: learn: 0.3024947 total: 750ms remaining: 3.2s 190: learn: 0.3022068 
total: 753ms remaining: 3.19s 191: learn: 0.3020112 total: 756ms remaining: 3.18s 192: learn: 0.3017242 total: 760ms remaining: 3.18s 193: learn: 0.3015675 total: 763ms remaining: 3.17s 194: learn: 0.3013220 total: 766ms remaining: 3.16s 195: learn: 0.3010756 total: 770ms remaining: 3.16s 196: learn: 0.3007365 total: 773ms remaining: 3.15s 197: learn: 0.3005133 total: 777ms remaining: 3.15s 198: learn: 0.3003492 total: 780ms remaining: 3.14s 199: learn: 0.3001661 total: 783ms remaining: 3.13s 200: learn: 0.2999112 total: 786ms remaining: 3.13s 201: learn: 0.2996422 total: 790ms remaining: 3.12s 202: learn: 0.2993356 total: 793ms remaining: 3.11s 203: learn: 0.2990955 total: 797ms remaining: 3.11s 204: learn: 0.2987875 total: 800ms remaining: 3.1s 205: learn: 0.2985654 total: 804ms remaining: 3.1s 206: learn: 0.2984141 total: 807ms remaining: 3.09s 207: learn: 0.2981939 total: 811ms remaining: 3.09s 208: learn: 0.2978780 total: 814ms remaining: 3.08s 209: learn: 0.2976001 total: 818ms remaining: 3.08s 210: learn: 0.2974219 total: 821ms remaining: 3.07s 211: learn: 0.2971997 total: 824ms remaining: 3.06s 212: learn: 0.2970250 total: 828ms remaining: 3.06s 213: learn: 0.2968885 total: 831ms remaining: 3.05s 214: learn: 0.2967163 total: 834ms remaining: 3.05s 215: learn: 0.2965557 total: 838ms remaining: 3.04s 216: learn: 0.2964122 total: 842ms remaining: 3.04s 217: learn: 0.2962032 total: 846ms remaining: 3.04s 218: learn: 0.2960097 total: 850ms remaining: 3.03s 219: learn: 0.2957014 total: 858ms remaining: 3.04s 220: learn: 0.2954230 total: 862ms remaining: 3.04s 221: learn: 0.2952002 total: 866ms remaining: 3.03s 222: learn: 0.2950404 total: 870ms remaining: 3.03s 223: learn: 0.2949176 total: 874ms remaining: 3.03s 224: learn: 0.2947667 total: 878ms remaining: 3.02s 225: learn: 0.2946456 total: 883ms remaining: 3.02s 226: learn: 0.2944405 total: 886ms remaining: 3.02s 227: learn: 0.2942955 total: 890ms remaining: 3.01s 228: learn: 0.2941223 total: 893ms remaining: 
3.01s 229: learn: 0.2938174 total: 897ms remaining: 3s 230: learn: 0.2936145 total: 900ms remaining: 3s 231: learn: 0.2932668 total: 904ms remaining: 2.99s 232: learn: 0.2930326 total: 907ms remaining: 2.98s 233: learn: 0.2929063 total: 910ms remaining: 2.98s 234: learn: 0.2927673 total: 914ms remaining: 2.97s 235: learn: 0.2926531 total: 917ms remaining: 2.97s 236: learn: 0.2924220 total: 920ms remaining: 2.96s 237: learn: 0.2922005 total: 924ms remaining: 2.96s 238: learn: 0.2920360 total: 927ms remaining: 2.95s 239: learn: 0.2919083 total: 931ms remaining: 2.95s 240: learn: 0.2917376 total: 934ms remaining: 2.94s 241: learn: 0.2915775 total: 937ms remaining: 2.94s 242: learn: 0.2913864 total: 941ms remaining: 2.93s 243: learn: 0.2911126 total: 944ms remaining: 2.92s 244: learn: 0.2910265 total: 947ms remaining: 2.92s 245: learn: 0.2908806 total: 951ms remaining: 2.91s 246: learn: 0.2905615 total: 954ms remaining: 2.91s 247: learn: 0.2902749 total: 958ms remaining: 2.9s 248: learn: 0.2900754 total: 961ms remaining: 2.9s 249: learn: 0.2897913 total: 964ms remaining: 2.89s 250: learn: 0.2894847 total: 968ms remaining: 2.89s 251: learn: 0.2892841 total: 971ms remaining: 2.88s 252: learn: 0.2891784 total: 974ms remaining: 2.88s 253: learn: 0.2890318 total: 978ms remaining: 2.87s 254: learn: 0.2888128 total: 981ms remaining: 2.87s 255: learn: 0.2886236 total: 985ms remaining: 2.86s 256: learn: 0.2884855 total: 988ms remaining: 2.86s 257: learn: 0.2883359 total: 991ms remaining: 2.85s 258: learn: 0.2882247 total: 994ms remaining: 2.85s 259: learn: 0.2881207 total: 998ms remaining: 2.84s 260: learn: 0.2879694 total: 1s remaining: 2.83s 261: learn: 0.2878212 total: 1s remaining: 2.83s 262: learn: 0.2877213 total: 1.01s remaining: 2.83s 263: learn: 0.2875798 total: 1.02s remaining: 2.84s 264: learn: 0.2873658 total: 1.02s remaining: 2.83s 265: learn: 0.2871890 total: 1.03s remaining: 2.85s 266: learn: 0.2870211 total: 1.03s remaining: 2.84s 267: learn: 0.2867762 total: 
1.04s remaining: 2.83s 268: learn: 0.2866392 total: 1.04s remaining: 2.83s 269: learn: 0.2865334 total: 1.04s remaining: 2.82s 270: learn: 0.2863629 total: 1.05s remaining: 2.82s 271: learn: 0.2862222 total: 1.05s remaining: 2.81s 272: learn: 0.2860566 total: 1.05s remaining: 2.81s 273: learn: 0.2858048 total: 1.06s remaining: 2.8s 274: learn: 0.2856793 total: 1.06s remaining: 2.8s 275: learn: 0.2854368 total: 1.06s remaining: 2.79s 276: learn: 0.2853200 total: 1.07s remaining: 2.79s 277: learn: 0.2852269 total: 1.07s remaining: 2.78s 278: learn: 0.2851275 total: 1.07s remaining: 2.78s 279: learn: 0.2848390 total: 1.08s remaining: 2.77s 280: learn: 0.2846262 total: 1.08s remaining: 2.77s 281: learn: 0.2844088 total: 1.08s remaining: 2.76s 282: learn: 0.2843248 total: 1.09s remaining: 2.76s 283: learn: 0.2840835 total: 1.09s remaining: 2.76s 284: learn: 0.2838878 total: 1.1s remaining: 2.75s 285: learn: 0.2837349 total: 1.1s remaining: 2.75s 286: learn: 0.2834746 total: 1.1s remaining: 2.74s 287: learn: 0.2832253 total: 1.11s remaining: 2.74s 288: learn: 0.2830403 total: 1.11s remaining: 2.73s 289: learn: 0.2828169 total: 1.11s remaining: 2.73s 290: learn: 0.2826839 total: 1.12s remaining: 2.72s 291: learn: 0.2825252 total: 1.12s remaining: 2.72s 292: learn: 0.2823311 total: 1.12s remaining: 2.71s 293: learn: 0.2821915 total: 1.13s remaining: 2.71s 294: learn: 0.2821406 total: 1.13s remaining: 2.7s 295: learn: 0.2819230 total: 1.13s remaining: 2.7s 296: learn: 0.2817950 total: 1.14s remaining: 2.69s 297: learn: 0.2815599 total: 1.14s remaining: 2.69s 298: learn: 0.2814782 total: 1.14s remaining: 2.68s 299: learn: 0.2814256 total: 1.15s remaining: 2.68s 300: learn: 0.2812974 total: 1.15s remaining: 2.67s 301: learn: 0.2811640 total: 1.15s remaining: 2.67s 302: learn: 0.2810048 total: 1.16s remaining: 2.66s 303: learn: 0.2808188 total: 1.16s remaining: 2.66s 304: learn: 0.2806812 total: 1.16s remaining: 2.65s 305: learn: 0.2803804 total: 1.17s remaining: 2.65s 306: 
learn: 0.2801372 total: 1.17s remaining: 2.64s 307: learn: 0.2800519 total: 1.17s remaining: 2.64s 308: learn: 0.2798999 total: 1.19s remaining: 2.66s 309: learn: 0.2796666 total: 1.2s remaining: 2.67s 310: learn: 0.2796041 total: 1.2s remaining: 2.66s 311: learn: 0.2794908 total: 1.21s remaining: 2.66s 312: learn: 0.2793434 total: 1.22s remaining: 2.67s 313: learn: 0.2791515 total: 1.22s remaining: 2.67s 314: learn: 0.2789353 total: 1.22s remaining: 2.66s 315: learn: 0.2788684 total: 1.23s remaining: 2.66s 316: learn: 0.2787636 total: 1.23s remaining: 2.65s 317: learn: 0.2784770 total: 1.24s remaining: 2.65s 318: learn: 0.2782301 total: 1.24s remaining: 2.65s 319: learn: 0.2778722 total: 1.25s remaining: 2.65s 320: learn: 0.2777688 total: 1.26s remaining: 2.66s 321: learn: 0.2776319 total: 1.26s remaining: 2.65s 322: learn: 0.2775207 total: 1.26s remaining: 2.65s 323: learn: 0.2772684 total: 1.27s remaining: 2.65s 324: learn: 0.2771331 total: 1.27s remaining: 2.64s 325: learn: 0.2770282 total: 1.27s remaining: 2.64s 326: learn: 0.2768654 total: 1.28s remaining: 2.63s 327: learn: 0.2767555 total: 1.28s remaining: 2.63s 328: learn: 0.2764970 total: 1.29s remaining: 2.63s 329: learn: 0.2762946 total: 1.29s remaining: 2.63s 330: learn: 0.2760613 total: 1.3s remaining: 2.63s 331: learn: 0.2759591 total: 1.3s remaining: 2.63s 332: learn: 0.2757613 total: 1.31s remaining: 2.62s 333: learn: 0.2756238 total: 1.31s remaining: 2.62s 334: learn: 0.2754821 total: 1.32s remaining: 2.61s 335: learn: 0.2753985 total: 1.32s remaining: 2.61s 336: learn: 0.2752850 total: 1.32s remaining: 2.61s 337: learn: 0.2751602 total: 1.33s remaining: 2.61s 338: learn: 0.2749525 total: 1.34s remaining: 2.6s 339: learn: 0.2748372 total: 1.34s remaining: 2.6s 340: learn: 0.2745888 total: 1.35s remaining: 2.61s 341: learn: 0.2745025 total: 1.36s remaining: 2.62s 342: learn: 0.2743653 total: 1.36s remaining: 2.61s 343: learn: 0.2742577 total: 1.37s remaining: 2.62s 344: learn: 0.2741288 total: 1.38s 
remaining: 2.62s 345: learn: 0.2740575 total: 1.38s remaining: 2.61s 346: learn: 0.2739990 total: 1.39s remaining: 2.61s 347: learn: 0.2737719 total: 1.39s remaining: 2.61s 348: learn: 0.2736320 total: 1.4s remaining: 2.6s 349: learn: 0.2734428 total: 1.4s remaining: 2.6s 350: learn: 0.2733257 total: 1.4s remaining: 2.6s 351: learn: 0.2732226 total: 1.41s remaining: 2.6s 352: learn: 0.2731361 total: 1.42s remaining: 2.6s 353: learn: 0.2729514 total: 1.42s remaining: 2.6s 354: learn: 0.2727241 total: 1.43s remaining: 2.59s 355: learn: 0.2726143 total: 1.43s remaining: 2.59s 356: learn: 0.2724556 total: 1.43s remaining: 2.58s 357: learn: 0.2723711 total: 1.44s remaining: 2.58s 358: learn: 0.2722243 total: 1.44s remaining: 2.57s 359: learn: 0.2720301 total: 1.45s remaining: 2.58s 360: learn: 0.2718048 total: 1.46s remaining: 2.58s 361: learn: 0.2717473 total: 1.46s remaining: 2.57s 362: learn: 0.2715671 total: 1.47s remaining: 2.57s 363: learn: 0.2714980 total: 1.47s remaining: 2.57s 364: learn: 0.2713106 total: 1.47s remaining: 2.56s 365: learn: 0.2712039 total: 1.48s remaining: 2.56s 366: learn: 0.2711068 total: 1.48s remaining: 2.56s 367: learn: 0.2709695 total: 1.49s remaining: 2.56s 368: learn: 0.2708866 total: 1.49s remaining: 2.55s 369: learn: 0.2708364 total: 1.5s remaining: 2.55s 370: learn: 0.2706628 total: 1.5s remaining: 2.54s 371: learn: 0.2704430 total: 1.5s remaining: 2.54s 372: learn: 0.2703801 total: 1.51s remaining: 2.54s 373: learn: 0.2702900 total: 1.51s remaining: 2.53s 374: learn: 0.2701495 total: 1.52s remaining: 2.53s 375: learn: 0.2700202 total: 1.53s remaining: 2.53s 376: learn: 0.2699522 total: 1.53s remaining: 2.53s 377: learn: 0.2697838 total: 1.53s remaining: 2.53s 378: learn: 0.2696645 total: 1.54s remaining: 2.53s 379: learn: 0.2695345 total: 1.55s remaining: 2.53s 380: learn: 0.2693779 total: 1.55s remaining: 2.52s 381: learn: 0.2692520 total: 1.56s remaining: 2.52s 382: learn: 0.2691232 total: 1.56s remaining: 2.52s 383: learn: 
0.2690340 total: 1.57s remaining: 2.51s 384: learn: 0.2688683 total: 1.57s remaining: 2.51s 385: learn: 0.2687564 total: 1.58s remaining: 2.51s 386: learn: 0.2684686 total: 1.58s remaining: 2.5s 387: learn: 0.2682308 total: 1.58s remaining: 2.5s 388: learn: 0.2680988 total: 1.59s remaining: 2.5s 389: learn: 0.2680132 total: 1.59s remaining: 2.49s 390: learn: 0.2678600 total: 1.59s remaining: 2.48s 391: learn: 0.2676456 total: 1.6s remaining: 2.48s 392: learn: 0.2675392 total: 1.6s remaining: 2.48s 393: learn: 0.2674507 total: 1.6s remaining: 2.47s 394: learn: 0.2673998 total: 1.61s remaining: 2.46s 395: learn: 0.2671864 total: 1.61s remaining: 2.46s 396: learn: 0.2670841 total: 1.62s remaining: 2.45s 397: learn: 0.2668796 total: 1.62s remaining: 2.45s 398: learn: 0.2666884 total: 1.62s remaining: 2.44s 399: learn: 0.2664891 total: 1.63s remaining: 2.44s 400: learn: 0.2663574 total: 1.63s remaining: 2.43s 401: learn: 0.2662547 total: 1.63s remaining: 2.43s 402: learn: 0.2661064 total: 1.64s remaining: 2.42s 403: learn: 0.2658883 total: 1.64s remaining: 2.42s 404: learn: 0.2657906 total: 1.64s remaining: 2.41s 405: learn: 0.2656547 total: 1.65s remaining: 2.41s 406: learn: 0.2655211 total: 1.65s remaining: 2.4s 407: learn: 0.2654305 total: 1.65s remaining: 2.4s 408: learn: 0.2652390 total: 1.66s remaining: 2.39s 409: learn: 0.2651964 total: 1.66s remaining: 2.39s 410: learn: 0.2649991 total: 1.66s remaining: 2.38s 411: learn: 0.2649179 total: 1.67s remaining: 2.38s 412: learn: 0.2646749 total: 1.67s remaining: 2.37s 413: learn: 0.2645711 total: 1.67s remaining: 2.37s 414: learn: 0.2645186 total: 1.68s remaining: 2.36s 415: learn: 0.2644780 total: 1.68s remaining: 2.36s 416: learn: 0.2644482 total: 1.68s remaining: 2.35s 417: learn: 0.2643629 total: 1.69s remaining: 2.35s 418: learn: 0.2641658 total: 1.7s remaining: 2.35s 419: learn: 0.2640240 total: 1.7s remaining: 2.35s 420: learn: 0.2639250 total: 1.7s remaining: 2.34s 421: learn: 0.2637978 total: 1.71s remaining: 
2.34s 422: learn: 0.2636782 total: 1.71s remaining: 2.33s 423: learn: 0.2635344 total: 1.71s remaining: 2.33s 424: learn: 0.2634880 total: 1.72s remaining: 2.33s 425: learn: 0.2633779 total: 1.72s remaining: 2.32s 426: learn: 0.2632474 total: 1.73s remaining: 2.32s 427: learn: 0.2631369 total: 1.73s remaining: 2.31s 428: learn: 0.2630485 total: 1.73s remaining: 2.31s 429: learn: 0.2628024 total: 1.74s remaining: 2.3s 430: learn: 0.2626457 total: 1.74s remaining: 2.3s 431: learn: 0.2625426 total: 1.74s remaining: 2.29s 432: learn: 0.2624424 total: 1.75s remaining: 2.29s 433: learn: 0.2624077 total: 1.75s remaining: 2.28s 434: learn: 0.2623355 total: 1.75s remaining: 2.28s 435: learn: 0.2622700 total: 1.76s remaining: 2.27s 436: learn: 0.2620511 total: 1.76s remaining: 2.27s 437: learn: 0.2619764 total: 1.76s remaining: 2.26s 438: learn: 0.2617171 total: 1.77s remaining: 2.26s 439: learn: 0.2614874 total: 1.77s remaining: 2.26s 440: learn: 0.2614352 total: 1.78s remaining: 2.25s 441: learn: 0.2613545 total: 1.78s remaining: 2.25s 442: learn: 0.2612656 total: 1.78s remaining: 2.24s 443: learn: 0.2611059 total: 1.79s remaining: 2.24s 444: learn: 0.2609896 total: 1.79s remaining: 2.23s 445: learn: 0.2609256 total: 1.79s remaining: 2.23s 446: learn: 0.2608129 total: 1.8s remaining: 2.23s 447: learn: 0.2607387 total: 1.8s remaining: 2.22s 448: learn: 0.2605056 total: 1.8s remaining: 2.21s 449: learn: 0.2604160 total: 1.81s remaining: 2.21s 450: learn: 0.2602371 total: 1.81s remaining: 2.21s 451: learn: 0.2602037 total: 1.81s remaining: 2.2s 452: learn: 0.2600912 total: 1.82s remaining: 2.2s 453: learn: 0.2599282 total: 1.82s remaining: 2.19s 454: learn: 0.2598345 total: 1.82s remaining: 2.19s 455: learn: 0.2597922 total: 1.83s remaining: 2.18s 456: learn: 0.2596266 total: 1.83s remaining: 2.18s 457: learn: 0.2595433 total: 1.83s remaining: 2.17s 458: learn: 0.2594090 total: 1.84s remaining: 2.17s 459: learn: 0.2593035 total: 1.84s remaining: 2.16s 460: learn: 0.2592153 
total: 1.85s remaining: 2.16s 461: learn: 0.2591271 total: 1.85s remaining: 2.15s 462: learn: 0.2590498 total: 1.86s remaining: 2.16s 463: learn: 0.2588946 total: 1.86s remaining: 2.15s 464: learn: 0.2588199 total: 1.87s remaining: 2.15s 465: learn: 0.2587395 total: 1.87s remaining: 2.14s 466: learn: 0.2586959 total: 1.87s remaining: 2.14s 467: learn: 0.2585497 total: 1.88s remaining: 2.14s 468: learn: 0.2583144 total: 1.88s remaining: 2.13s 469: learn: 0.2581775 total: 1.89s remaining: 2.13s 470: learn: 0.2579865 total: 1.89s remaining: 2.12s 471: learn: 0.2578553 total: 1.89s remaining: 2.12s 472: learn: 0.2577503 total: 1.9s remaining: 2.11s 473: learn: 0.2577023 total: 1.9s remaining: 2.11s 474: learn: 0.2576833 total: 1.9s remaining: 2.1s 475: learn: 0.2575463 total: 1.91s remaining: 2.1s 476: learn: 0.2574282 total: 1.91s remaining: 2.1s 477: learn: 0.2573764 total: 1.91s remaining: 2.09s 478: learn: 0.2571829 total: 1.92s remaining: 2.09s 479: learn: 0.2570700 total: 1.92s remaining: 2.08s 480: learn: 0.2568594 total: 1.92s remaining: 2.08s 481: learn: 0.2567211 total: 1.93s remaining: 2.07s 482: learn: 0.2566405 total: 1.93s remaining: 2.07s 483: learn: 0.2565167 total: 1.93s remaining: 2.06s 484: learn: 0.2563877 total: 1.94s remaining: 2.06s 485: learn: 0.2563038 total: 1.94s remaining: 2.05s 486: learn: 0.2562131 total: 1.95s remaining: 2.05s 487: learn: 0.2560718 total: 1.95s remaining: 2.04s 488: learn: 0.2560210 total: 1.95s remaining: 2.04s 489: learn: 0.2559922 total: 1.96s remaining: 2.03s 490: learn: 0.2559155 total: 1.96s remaining: 2.03s 491: learn: 0.2558589 total: 1.96s remaining: 2.02s 492: learn: 0.2557019 total: 1.97s remaining: 2.02s 493: learn: 0.2556647 total: 1.97s remaining: 2.02s 494: learn: 0.2554779 total: 1.97s remaining: 2.01s 495: learn: 0.2553424 total: 1.98s remaining: 2.01s 496: learn: 0.2552908 total: 1.98s remaining: 2s 497: learn: 0.2552156 total: 1.98s remaining: 2s 498: learn: 0.2551092 total: 1.99s remaining: 1.99s 499: 
learn: 0.2550218 total: 1.99s remaining: 1.99s 500: learn: 0.2549074 total: 1.99s remaining: 1.98s 501: learn: 0.2547900 total: 2s remaining: 1.98s 502: learn: 0.2546525 total: 2s remaining: 1.98s 503: learn: 0.2545217 total: 2s remaining: 1.97s 504: learn: 0.2543599 total: 2s remaining: 1.97s 505: learn: 0.2541854 total: 2.01s remaining: 1.96s 506: learn: 0.2541332 total: 2.01s remaining: 1.96s 507: learn: 0.2539874 total: 2.02s remaining: 1.95s 508: learn: 0.2538971 total: 2.02s remaining: 1.95s 509: learn: 0.2537850 total: 2.02s remaining: 1.95s 510: learn: 0.2536828 total: 2.03s remaining: 1.94s 511: learn: 0.2536044 total: 2.04s remaining: 1.94s 512: learn: 0.2535278 total: 2.04s remaining: 1.94s 513: learn: 0.2534342 total: 2.05s remaining: 1.94s 514: learn: 0.2532736 total: 2.05s remaining: 1.93s 515: learn: 0.2532460 total: 2.06s remaining: 1.93s 516: learn: 0.2531386 total: 2.06s remaining: 1.92s 517: learn: 0.2530961 total: 2.06s remaining: 1.92s 518: learn: 0.2529731 total: 2.07s remaining: 1.92s 519: learn: 0.2528714 total: 2.07s remaining: 1.91s 520: learn: 0.2527061 total: 2.07s remaining: 1.91s 521: learn: 0.2526011 total: 2.08s remaining: 1.9s 522: learn: 0.2524705 total: 2.08s remaining: 1.9s 523: learn: 0.2523047 total: 2.08s remaining: 1.89s 524: learn: 0.2522012 total: 2.09s remaining: 1.89s 525: learn: 0.2520925 total: 2.09s remaining: 1.88s 526: learn: 0.2519891 total: 2.09s remaining: 1.88s 527: learn: 0.2519306 total: 2.1s remaining: 1.88s 528: learn: 0.2518493 total: 2.1s remaining: 1.87s 529: learn: 0.2517126 total: 2.1s remaining: 1.87s 530: learn: 0.2516118 total: 2.11s remaining: 1.86s 531: learn: 0.2514042 total: 2.11s remaining: 1.86s 532: learn: 0.2512979 total: 2.11s remaining: 1.85s 533: learn: 0.2511958 total: 2.12s remaining: 1.85s 534: learn: 0.2511486 total: 2.12s remaining: 1.84s 535: learn: 0.2510288 total: 2.13s remaining: 1.84s 536: learn: 0.2509958 total: 2.13s remaining: 1.83s 537: learn: 0.2508903 total: 2.13s remaining: 
1.83s 538: learn: 0.2508427 total: 2.13s remaining: 1.83s 539: learn: 0.2507715 total: 2.14s remaining: 1.82s 540: learn: 0.2506199 total: 2.14s remaining: 1.82s 541: learn: 0.2504342 total: 2.15s remaining: 1.81s 542: learn: 0.2503692 total: 2.15s remaining: 1.81s 543: learn: 0.2502936 total: 2.15s remaining: 1.8s 544: learn: 0.2501876 total: 2.15s remaining: 1.8s 545: learn: 0.2500752 total: 2.16s remaining: 1.79s 546: learn: 0.2500206 total: 2.16s remaining: 1.79s 547: learn: 0.2499896 total: 2.17s remaining: 1.79s 548: learn: 0.2499198 total: 2.17s remaining: 1.78s 549: learn: 0.2498201 total: 2.17s remaining: 1.78s 550: learn: 0.2497809 total: 2.18s remaining: 1.77s 551: learn: 0.2497313 total: 2.18s remaining: 1.77s 552: learn: 0.2496849 total: 2.18s remaining: 1.76s 553: learn: 0.2496329 total: 2.19s remaining: 1.76s 554: learn: 0.2495320 total: 2.19s remaining: 1.76s 555: learn: 0.2494849 total: 2.19s remaining: 1.75s 556: learn: 0.2493756 total: 2.2s remaining: 1.75s 557: learn: 0.2492981 total: 2.21s remaining: 1.75s 558: learn: 0.2492083 total: 2.21s remaining: 1.74s 559: learn: 0.2491766 total: 2.21s remaining: 1.74s 560: learn: 0.2491082 total: 2.22s remaining: 1.73s 561: learn: 0.2488971 total: 2.22s remaining: 1.73s 562: learn: 0.2488561 total: 2.22s remaining: 1.73s 563: learn: 0.2488344 total: 2.23s remaining: 1.72s 564: learn: 0.2487705 total: 2.23s remaining: 1.72s 565: learn: 0.2486749 total: 2.23s remaining: 1.71s 566: learn: 0.2486267 total: 2.24s remaining: 1.71s 567: learn: 0.2485660 total: 2.24s remaining: 1.7s 568: learn: 0.2483922 total: 2.24s remaining: 1.7s 569: learn: 0.2482027 total: 2.25s remaining: 1.7s 570: learn: 0.2481674 total: 2.25s remaining: 1.69s 571: learn: 0.2480725 total: 2.25s remaining: 1.69s 572: learn: 0.2479129 total: 2.26s remaining: 1.68s 573: learn: 0.2477539 total: 2.26s remaining: 1.68s 574: learn: 0.2476504 total: 2.26s remaining: 1.67s 575: learn: 0.2475660 total: 2.27s remaining: 1.67s 576: learn: 0.2475242 
total: 2.27s remaining: 1.67s 577: learn: 0.2474278 total: 2.27s remaining: 1.66s 578: learn: 0.2473724 total: 2.28s remaining: 1.66s 579: learn: 0.2472497 total: 2.28s remaining: 1.65s 580: learn: 0.2471914 total: 2.29s remaining: 1.65s 581: learn: 0.2471061 total: 2.29s remaining: 1.64s 582: learn: 0.2470749 total: 2.29s remaining: 1.64s 583: learn: 0.2469878 total: 2.29s remaining: 1.63s 584: learn: 0.2468849 total: 2.3s remaining: 1.63s 585: learn: 0.2467943 total: 2.3s remaining: 1.63s 586: learn: 0.2467631 total: 2.31s remaining: 1.62s 587: learn: 0.2466915 total: 2.31s remaining: 1.62s 588: learn: 0.2466732 total: 2.31s remaining: 1.61s 589: learn: 0.2466538 total: 2.31s remaining: 1.61s 590: learn: 0.2465670 total: 2.32s remaining: 1.6s 591: learn: 0.2465037 total: 2.32s remaining: 1.6s 592: learn: 0.2463053 total: 2.33s remaining: 1.59s 593: learn: 0.2462162 total: 2.33s remaining: 1.59s 594: learn: 0.2461612 total: 2.33s remaining: 1.59s 595: learn: 0.2460088 total: 2.33s remaining: 1.58s 596: learn: 0.2459558 total: 2.34s remaining: 1.58s 597: learn: 0.2458830 total: 2.34s remaining: 1.57s 598: learn: 0.2457733 total: 2.35s remaining: 1.57s 599: learn: 0.2457290 total: 2.35s remaining: 1.56s 600: learn: 0.2455918 total: 2.35s remaining: 1.56s 601: learn: 0.2455228 total: 2.35s remaining: 1.56s 602: learn: 0.2453833 total: 2.37s remaining: 1.56s 603: learn: 0.2453010 total: 2.37s remaining: 1.55s 604: learn: 0.2452441 total: 2.38s remaining: 1.55s 605: learn: 0.2450812 total: 2.38s remaining: 1.55s 606: learn: 0.2449523 total: 2.39s remaining: 1.54s 607: learn: 0.2448494 total: 2.39s remaining: 1.54s 608: learn: 0.2447942 total: 2.39s remaining: 1.54s 609: learn: 0.2447450 total: 2.4s remaining: 1.53s 610: learn: 0.2446708 total: 2.4s remaining: 1.53s 611: learn: 0.2446070 total: 2.4s remaining: 1.52s 612: learn: 0.2445356 total: 2.41s remaining: 1.52s 613: learn: 0.2444969 total: 2.41s remaining: 1.52s 614: learn: 0.2443947 total: 2.42s remaining: 1.51s 
615: learn: 0.2443511 total: 2.42s remaining: 1.51s 616: learn: 0.2442655 total: 2.42s remaining: 1.5s 617: learn: 0.2441727 total: 2.42s remaining: 1.5s 618: learn: 0.2441216 total: 2.43s remaining: 1.5s 619: learn: 0.2440844 total: 2.43s remaining: 1.49s 620: learn: 0.2439086 total: 2.44s remaining: 1.49s 621: learn: 0.2438168 total: 2.44s remaining: 1.48s 622: learn: 0.2437303 total: 2.44s remaining: 1.48s 623: learn: 0.2436552 total: 2.44s remaining: 1.47s 624: learn: 0.2435965 total: 2.45s remaining: 1.47s 625: learn: 0.2435475 total: 2.45s remaining: 1.47s 626: learn: 0.2434712 total: 2.46s remaining: 1.46s 627: learn: 0.2433981 total: 2.46s remaining: 1.46s 628: learn: 0.2433402 total: 2.46s remaining: 1.45s 629: learn: 0.2432356 total: 2.46s remaining: 1.45s 630: learn: 0.2431487 total: 2.47s remaining: 1.44s 631: learn: 0.2430713 total: 2.47s remaining: 1.44s 632: learn: 0.2430172 total: 2.48s remaining: 1.44s 633: learn: 0.2429168 total: 2.48s remaining: 1.43s 634: learn: 0.2428492 total: 2.48s remaining: 1.43s 635: learn: 0.2426868 total: 2.49s remaining: 1.42s 636: learn: 0.2426157 total: 2.49s remaining: 1.42s 637: learn: 0.2425227 total: 2.49s remaining: 1.41s 638: learn: 0.2424132 total: 2.5s remaining: 1.41s 639: learn: 0.2423204 total: 2.5s remaining: 1.41s 640: learn: 0.2422937 total: 2.5s remaining: 1.4s 641: learn: 0.2422384 total: 2.51s remaining: 1.4s 642: learn: 0.2422020 total: 2.51s remaining: 1.39s 643: learn: 0.2420620 total: 2.51s remaining: 1.39s 644: learn: 0.2420113 total: 2.52s remaining: 1.39s 645: learn: 0.2419528 total: 2.52s remaining: 1.38s 646: learn: 0.2418311 total: 2.52s remaining: 1.38s 647: learn: 0.2417693 total: 2.53s remaining: 1.37s 648: learn: 0.2417405 total: 2.53s remaining: 1.37s 649: learn: 0.2416606 total: 2.54s remaining: 1.37s 650: learn: 0.2415998 total: 2.55s remaining: 1.37s 651: learn: 0.2415504 total: 2.55s remaining: 1.36s 652: learn: 0.2414900 total: 2.56s remaining: 1.36s 653: learn: 0.2414158 total: 
2.56s remaining: 1.35s 654: learn: 0.2413029 total: 2.56s remaining: 1.35s 655: learn: 0.2412291 total: 2.57s remaining: 1.35s 656: learn: 0.2411556 total: 2.57s remaining: 1.34s 657: learn: 0.2411215 total: 2.57s remaining: 1.34s 658: learn: 0.2410754 total: 2.58s remaining: 1.33s 659: learn: 0.2410379 total: 2.58s remaining: 1.33s 660: learn: 0.2410056 total: 2.58s remaining: 1.32s 661: learn: 0.2408910 total: 2.59s remaining: 1.32s 662: learn: 0.2408256 total: 2.59s remaining: 1.32s 663: learn: 0.2407879 total: 2.59s remaining: 1.31s 664: learn: 0.2407037 total: 2.6s remaining: 1.31s 665: learn: 0.2406197 total: 2.6s remaining: 1.3s 666: learn: 0.2405769 total: 2.6s remaining: 1.3s 667: learn: 0.2405318 total: 2.61s remaining: 1.3s 668: learn: 0.2404607 total: 2.61s remaining: 1.29s 669: learn: 0.2403774 total: 2.62s remaining: 1.29s 670: learn: 0.2403291 total: 2.62s remaining: 1.28s 671: learn: 0.2402350 total: 2.62s remaining: 1.28s 672: learn: 0.2401623 total: 2.63s remaining: 1.27s 673: learn: 0.2401350 total: 2.63s remaining: 1.27s 674: learn: 0.2400081 total: 2.63s remaining: 1.27s 675: learn: 0.2399233 total: 2.64s remaining: 1.26s 676: learn: 0.2398712 total: 2.64s remaining: 1.26s 677: learn: 0.2397867 total: 2.64s remaining: 1.25s 678: learn: 0.2397563 total: 2.65s remaining: 1.25s 679: learn: 0.2396862 total: 2.65s remaining: 1.25s 680: learn: 0.2396441 total: 2.65s remaining: 1.24s 681: learn: 0.2395835 total: 2.66s remaining: 1.24s 682: learn: 0.2395359 total: 2.66s remaining: 1.23s 683: learn: 0.2393955 total: 2.66s remaining: 1.23s 684: learn: 0.2393346 total: 2.67s remaining: 1.23s 685: learn: 0.2391722 total: 2.67s remaining: 1.22s 686: learn: 0.2391066 total: 2.68s remaining: 1.22s 687: learn: 0.2390196 total: 2.68s remaining: 1.22s 688: learn: 0.2389558 total: 2.69s remaining: 1.21s 689: learn: 0.2388105 total: 2.69s remaining: 1.21s 690: learn: 0.2387642 total: 2.69s remaining: 1.2s 691: learn: 0.2387261 total: 2.7s remaining: 1.2s 692: 
learn: 0.2386991 total: 2.71s remaining: 1.2s 693: learn: 0.2386398 total: 2.71s remaining: 1.2s 694: learn: 0.2385865 total: 2.72s remaining: 1.19s 695: learn: 0.2385332 total: 2.72s remaining: 1.19s 696: learn: 0.2384459 total: 2.73s remaining: 1.19s 697: learn: 0.2383879 total: 2.73s remaining: 1.18s 698: learn: 0.2383028 total: 2.73s remaining: 1.18s 699: learn: 0.2382112 total: 2.74s remaining: 1.17s 700: learn: 0.2380977 total: 2.74s remaining: 1.17s 701: learn: 0.2380799 total: 2.74s remaining: 1.16s 702: learn: 0.2379976 total: 2.75s remaining: 1.16s 703: learn: 0.2379199 total: 2.75s remaining: 1.16s 704: learn: 0.2378439 total: 2.75s remaining: 1.15s 705: learn: 0.2377861 total: 2.76s remaining: 1.15s 706: learn: 0.2377024 total: 2.76s remaining: 1.14s 707: learn: 0.2376318 total: 2.76s remaining: 1.14s 708: learn: 0.2375522 total: 2.77s remaining: 1.14s 709: learn: 0.2375134 total: 2.77s remaining: 1.13s 710: learn: 0.2374893 total: 2.77s remaining: 1.13s 711: learn: 0.2373964 total: 2.78s remaining: 1.12s 712: learn: 0.2373353 total: 2.78s remaining: 1.12s 713: learn: 0.2372972 total: 2.78s remaining: 1.11s 714: learn: 0.2372542 total: 2.79s remaining: 1.11s 715: learn: 0.2371346 total: 2.79s remaining: 1.11s 716: learn: 0.2370029 total: 2.79s remaining: 1.1s 717: learn: 0.2369417 total: 2.8s remaining: 1.1s 718: learn: 0.2369117 total: 2.8s remaining: 1.09s 719: learn: 0.2368507 total: 2.8s remaining: 1.09s 720: learn: 0.2367827 total: 2.81s remaining: 1.09s 721: learn: 0.2367051 total: 2.81s remaining: 1.08s 722: learn: 0.2365816 total: 2.81s remaining: 1.08s 723: learn: 0.2365129 total: 2.82s remaining: 1.07s 724: learn: 0.2364712 total: 2.82s remaining: 1.07s 725: learn: 0.2364213 total: 2.82s remaining: 1.07s 726: learn: 0.2363554 total: 2.83s remaining: 1.06s 727: learn: 0.2363213 total: 2.83s remaining: 1.06s 728: learn: 0.2362223 total: 2.83s remaining: 1.05s 729: learn: 0.2361617 total: 2.84s remaining: 1.05s 730: learn: 0.2360921 total: 2.84s 
remaining: 1.04s 731: learn: 0.2360441 total: 2.84s remaining: 1.04s 732: learn: 0.2359741 total: 2.85s remaining: 1.04s 733: learn: 0.2359376 total: 2.85s remaining: 1.03s 734: learn: 0.2358921 total: 2.85s remaining: 1.03s 735: learn: 0.2358205 total: 2.86s remaining: 1.02s 736: learn: 0.2357948 total: 2.86s remaining: 1.02s 737: learn: 0.2356901 total: 2.87s remaining: 1.02s 738: learn: 0.2356588 total: 2.87s remaining: 1.01s 739: learn: 0.2356184 total: 2.88s remaining: 1.01s 740: learn: 0.2355779 total: 2.88s remaining: 1.01s 741: learn: 0.2355099 total: 2.89s remaining: 1s 742: learn: 0.2354941 total: 2.89s remaining: 1000ms 743: learn: 0.2354081 total: 2.89s remaining: 996ms 744: learn: 0.2353438 total: 2.9s remaining: 992ms 745: learn: 0.2352402 total: 2.9s remaining: 988ms 746: learn: 0.2351879 total: 2.9s remaining: 984ms 747: learn: 0.2351537 total: 2.91s remaining: 980ms 748: learn: 0.2351078 total: 2.91s remaining: 975ms 749: learn: 0.2350131 total: 2.91s remaining: 971ms 750: learn: 0.2349872 total: 2.92s remaining: 968ms 751: learn: 0.2349553 total: 2.92s remaining: 963ms 752: learn: 0.2348711 total: 2.92s remaining: 959ms 753: learn: 0.2347731 total: 2.93s remaining: 955ms 754: learn: 0.2346777 total: 2.93s remaining: 951ms 755: learn: 0.2345688 total: 2.94s remaining: 947ms 756: learn: 0.2345180 total: 2.94s remaining: 943ms 757: learn: 0.2344879 total: 2.94s remaining: 939ms 758: learn: 0.2344386 total: 2.94s remaining: 935ms 759: learn: 0.2344047 total: 2.95s remaining: 931ms 760: learn: 0.2343220 total: 2.95s remaining: 927ms 761: learn: 0.2341886 total: 2.96s remaining: 923ms 762: learn: 0.2341144 total: 2.96s remaining: 919ms 763: learn: 0.2340828 total: 2.96s remaining: 915ms 764: learn: 0.2340420 total: 2.96s remaining: 911ms 765: learn: 0.2340147 total: 2.97s remaining: 907ms 766: learn: 0.2339248 total: 2.97s remaining: 903ms 767: learn: 0.2338438 total: 2.98s remaining: 899ms 768: learn: 0.2337621 total: 2.98s remaining: 895ms 769: learn: 
0.2336849 total: 2.98s remaining: 891ms 770: learn: 0.2336155 total: 2.99s remaining: 887ms 771: learn: 0.2335603 total: 2.99s remaining: 883ms 772: learn: 0.2335164 total: 2.99s remaining: 879ms 773: learn: 0.2333970 total: 3s remaining: 875ms 774: learn: 0.2333200 total: 3s remaining: 871ms 775: learn: 0.2332879 total: 3s remaining: 867ms 776: learn: 0.2332164 total: 3s remaining: 863ms 777: learn: 0.2331154 total: 3.01s remaining: 859ms 778: learn: 0.2330706 total: 3.01s remaining: 855ms 779: learn: 0.2329407 total: 3.02s remaining: 851ms 780: learn: 0.2329117 total: 3.02s remaining: 847ms 781: learn: 0.2328747 total: 3.02s remaining: 843ms 782: learn: 0.2327929 total: 3.02s remaining: 839ms 783: learn: 0.2327207 total: 3.03s remaining: 835ms 784: learn: 0.2326402 total: 3.03s remaining: 831ms 785: learn: 0.2325956 total: 3.04s remaining: 827ms 786: learn: 0.2325591 total: 3.04s remaining: 823ms 787: learn: 0.2324922 total: 3.04s remaining: 819ms 788: learn: 0.2324347 total: 3.05s remaining: 816ms 789: learn: 0.2324181 total: 3.06s remaining: 812ms 790: learn: 0.2323481 total: 3.06s remaining: 808ms 791: learn: 0.2323012 total: 3.06s remaining: 804ms 792: learn: 0.2322299 total: 3.06s remaining: 800ms 793: learn: 0.2321751 total: 3.07s remaining: 797ms 794: learn: 0.2321034 total: 3.07s remaining: 793ms 795: learn: 0.2320611 total: 3.08s remaining: 789ms 796: learn: 0.2320121 total: 3.08s remaining: 785ms 797: learn: 0.2319587 total: 3.08s remaining: 781ms 798: learn: 0.2318608 total: 3.09s remaining: 777ms 799: learn: 0.2317951 total: 3.09s remaining: 773ms 800: learn: 0.2317448 total: 3.09s remaining: 769ms 801: learn: 0.2316364 total: 3.1s remaining: 765ms 802: learn: 0.2315962 total: 3.1s remaining: 761ms 803: learn: 0.2314914 total: 3.1s remaining: 757ms 804: learn: 0.2313972 total: 3.11s remaining: 753ms 805: learn: 0.2313350 total: 3.11s remaining: 749ms 806: learn: 0.2312792 total: 3.11s remaining: 745ms 807: learn: 0.2312244 total: 3.12s remaining: 
741ms 808: learn: 0.2311161 total: 3.12s remaining: 737ms 809: learn: 0.2310438 total: 3.12s remaining: 733ms 810: learn: 0.2309989 total: 3.13s remaining: 729ms 811: learn: 0.2309674 total: 3.13s remaining: 725ms 812: learn: 0.2308942 total: 3.13s remaining: 721ms 813: learn: 0.2308612 total: 3.14s remaining: 717ms 814: learn: 0.2307232 total: 3.14s remaining: 713ms 815: learn: 0.2307034 total: 3.14s remaining: 709ms 816: learn: 0.2306134 total: 3.15s remaining: 705ms 817: learn: 0.2305735 total: 3.15s remaining: 701ms 818: learn: 0.2305306 total: 3.15s remaining: 697ms 819: learn: 0.2304464 total: 3.16s remaining: 693ms 820: learn: 0.2303827 total: 3.16s remaining: 689ms 821: learn: 0.2303182 total: 3.17s remaining: 685ms 822: learn: 0.2302646 total: 3.17s remaining: 681ms 823: learn: 0.2302014 total: 3.17s remaining: 677ms 824: learn: 0.2301247 total: 3.17s remaining: 674ms 825: learn: 0.2300737 total: 3.18s remaining: 670ms 826: learn: 0.2300264 total: 3.18s remaining: 666ms 827: learn: 0.2299775 total: 3.19s remaining: 662ms 828: learn: 0.2299043 total: 3.19s remaining: 658ms 829: learn: 0.2298450 total: 3.19s remaining: 654ms 830: learn: 0.2297777 total: 3.19s remaining: 650ms 831: learn: 0.2297360 total: 3.2s remaining: 646ms 832: learn: 0.2296486 total: 3.2s remaining: 642ms 833: learn: 0.2295955 total: 3.21s remaining: 638ms 834: learn: 0.2295600 total: 3.22s remaining: 636ms 835: learn: 0.2295173 total: 3.22s remaining: 632ms 836: learn: 0.2294744 total: 3.23s remaining: 628ms 837: learn: 0.2293896 total: 3.23s remaining: 624ms 838: learn: 0.2293186 total: 3.23s remaining: 620ms 839: learn: 0.2292468 total: 3.24s remaining: 617ms 840: learn: 0.2291383 total: 3.24s remaining: 613ms 841: learn: 0.2290439 total: 3.24s remaining: 609ms 842: learn: 0.2289866 total: 3.25s remaining: 605ms 843: learn: 0.2289602 total: 3.25s remaining: 601ms 844: learn: 0.2289055 total: 3.25s remaining: 597ms 845: learn: 0.2288403 total: 3.26s remaining: 593ms 846: learn: 
0.2287293 total: 3.26s remaining: 589ms 847: learn: 0.2286705 total: 3.26s remaining: 585ms 848: learn: 0.2286118 total: 3.27s remaining: 581ms 849: learn: 0.2285846 total: 3.27s remaining: 577ms 850: learn: 0.2284978 total: 3.27s remaining: 573ms 851: learn: 0.2284598 total: 3.28s remaining: 569ms 852: learn: 0.2284242 total: 3.28s remaining: 565ms 853: learn: 0.2282967 total: 3.28s remaining: 561ms 854: learn: 0.2282395 total: 3.29s remaining: 557ms 855: learn: 0.2282106 total: 3.29s remaining: 553ms 856: learn: 0.2280312 total: 3.29s remaining: 550ms 857: learn: 0.2279443 total: 3.3s remaining: 546ms 858: learn: 0.2278920 total: 3.3s remaining: 542ms 859: learn: 0.2278741 total: 3.3s remaining: 538ms 860: learn: 0.2277940 total: 3.31s remaining: 534ms 861: learn: 0.2277681 total: 3.31s remaining: 530ms 862: learn: 0.2277514 total: 3.31s remaining: 526ms 863: learn: 0.2276923 total: 3.32s remaining: 522ms 864: learn: 0.2276445 total: 3.32s remaining: 518ms 865: learn: 0.2275832 total: 3.32s remaining: 514ms 866: learn: 0.2274859 total: 3.33s remaining: 510ms 867: learn: 0.2274115 total: 3.33s remaining: 506ms 868: learn: 0.2273125 total: 3.33s remaining: 502ms 869: learn: 0.2272424 total: 3.34s remaining: 499ms 870: learn: 0.2271871 total: 3.34s remaining: 495ms 871: learn: 0.2271468 total: 3.34s remaining: 491ms 872: learn: 0.2270862 total: 3.35s remaining: 487ms 873: learn: 0.2270401 total: 3.35s remaining: 483ms 874: learn: 0.2269979 total: 3.35s remaining: 479ms 875: learn: 0.2269728 total: 3.36s remaining: 475ms 876: learn: 0.2269559 total: 3.36s remaining: 471ms 877: learn: 0.2268981 total: 3.36s remaining: 467ms 878: learn: 0.2268552 total: 3.37s remaining: 464ms 879: learn: 0.2268067 total: 3.37s remaining: 460ms 880: learn: 0.2267585 total: 3.38s remaining: 456ms 881: learn: 0.2267129 total: 3.38s remaining: 452ms 882: learn: 0.2266602 total: 3.38s remaining: 449ms 883: learn: 0.2266212 total: 3.39s remaining: 445ms 884: learn: 0.2265772 total: 3.39s 
remaining: 441ms 885: learn: 0.2264870 total: 3.4s remaining: 437ms 886: learn: 0.2264143 total: 3.4s remaining: 433ms 887: learn: 0.2263955 total: 3.4s remaining: 430ms 888: learn: 0.2263224 total: 3.41s remaining: 426ms 889: learn: 0.2262519 total: 3.41s remaining: 422ms 890: learn: 0.2261926 total: 3.42s remaining: 418ms 891: learn: 0.2261109 total: 3.42s remaining: 414ms 892: learn: 0.2260650 total: 3.42s remaining: 410ms 893: learn: 0.2259827 total: 3.43s remaining: 406ms 894: learn: 0.2259044 total: 3.43s remaining: 402ms 895: learn: 0.2258391 total: 3.43s remaining: 399ms 896: learn: 0.2258086 total: 3.44s remaining: 395ms 897: learn: 0.2257487 total: 3.44s remaining: 391ms 898: learn: 0.2256822 total: 3.44s remaining: 387ms 899: learn: 0.2256174 total: 3.45s remaining: 383ms 900: learn: 0.2255763 total: 3.45s remaining: 379ms 901: learn: 0.2254361 total: 3.45s remaining: 375ms 902: learn: 0.2253853 total: 3.46s remaining: 371ms 903: learn: 0.2253509 total: 3.46s remaining: 368ms 904: learn: 0.2253232 total: 3.46s remaining: 364ms 905: learn: 0.2252947 total: 3.47s remaining: 360ms 906: learn: 0.2251800 total: 3.47s remaining: 356ms 907: learn: 0.2251546 total: 3.48s remaining: 352ms 908: learn: 0.2251112 total: 3.48s remaining: 348ms 909: learn: 0.2250613 total: 3.48s remaining: 344ms 910: learn: 0.2250116 total: 3.48s remaining: 340ms 911: learn: 0.2249812 total: 3.49s remaining: 337ms 912: learn: 0.2249634 total: 3.49s remaining: 333ms 913: learn: 0.2249463 total: 3.5s remaining: 329ms 914: learn: 0.2248835 total: 3.5s remaining: 325ms 915: learn: 0.2248133 total: 3.5s remaining: 321ms 916: learn: 0.2247681 total: 3.51s remaining: 318ms 917: learn: 0.2247410 total: 3.51s remaining: 314ms 918: learn: 0.2247057 total: 3.51s remaining: 310ms 919: learn: 0.2246738 total: 3.52s remaining: 306ms 920: learn: 0.2246598 total: 3.52s remaining: 302ms 921: learn: 0.2246197 total: 3.52s remaining: 298ms 922: learn: 0.2245826 total: 3.53s remaining: 294ms 923: learn: 
0.2245224 total: 3.53s remaining: 290ms 924: learn: 0.2244875 total: 3.53s remaining: 287ms 925: learn: 0.2244352 total: 3.54s remaining: 283ms 926: learn: 0.2243778 total: 3.55s remaining: 279ms 927: learn: 0.2243162 total: 3.55s remaining: 276ms 928: learn: 0.2242499 total: 3.56s remaining: 272ms 929: learn: 0.2241920 total: 3.56s remaining: 268ms 930: learn: 0.2241621 total: 3.56s remaining: 264ms 931: learn: 0.2241300 total: 3.57s remaining: 260ms 932: learn: 0.2240920 total: 3.57s remaining: 257ms 933: learn: 0.2240424 total: 3.58s remaining: 253ms 934: learn: 0.2240127 total: 3.58s remaining: 249ms 935: learn: 0.2239260 total: 3.58s remaining: 245ms 936: learn: 0.2238490 total: 3.59s remaining: 241ms 937: learn: 0.2237482 total: 3.59s remaining: 237ms 938: learn: 0.2236843 total: 3.6s remaining: 234ms 939: learn: 0.2236358 total: 3.6s remaining: 230ms 940: learn: 0.2235534 total: 3.6s remaining: 226ms 941: learn: 0.2235226 total: 3.6s remaining: 222ms 942: learn: 0.2234306 total: 3.61s remaining: 218ms 943: learn: 0.2233572 total: 3.61s remaining: 214ms 944: learn: 0.2233009 total: 3.62s remaining: 211ms 945: learn: 0.2232260 total: 3.62s remaining: 207ms 946: learn: 0.2232114 total: 3.63s remaining: 203ms 947: learn: 0.2231428 total: 3.63s remaining: 199ms 948: learn: 0.2230701 total: 3.64s remaining: 195ms 949: learn: 0.2230146 total: 3.64s remaining: 192ms 950: learn: 0.2229664 total: 3.64s remaining: 188ms 951: learn: 0.2228902 total: 3.65s remaining: 184ms 952: learn: 0.2228463 total: 3.65s remaining: 180ms 953: learn: 0.2227739 total: 3.65s remaining: 176ms 954: learn: 0.2227274 total: 3.66s remaining: 172ms 955: learn: 0.2226493 total: 3.66s remaining: 169ms 956: learn: 0.2226064 total: 3.67s remaining: 165ms 957: learn: 0.2225584 total: 3.67s remaining: 161ms 958: learn: 0.2224575 total: 3.67s remaining: 157ms 959: learn: 0.2224004 total: 3.68s remaining: 153ms 960: learn: 0.2223019 total: 3.68s remaining: 149ms 961: learn: 0.2222506 total: 3.68s 
remaining: 146ms 962: learn: 0.2222184 total: 3.69s remaining: 142ms 963: learn: 0.2222020 total: 3.69s remaining: 138ms 964: learn: 0.2221731 total: 3.69s remaining: 134ms 965: learn: 0.2220978 total: 3.7s remaining: 130ms 966: learn: 0.2220579 total: 3.7s remaining: 126ms 967: learn: 0.2219697 total: 3.71s remaining: 122ms 968: learn: 0.2219505 total: 3.71s remaining: 119ms 969: learn: 0.2218993 total: 3.71s remaining: 115ms 970: learn: 0.2218343 total: 3.72s remaining: 111ms 971: learn: 0.2217700 total: 3.73s remaining: 107ms 972: learn: 0.2217090 total: 3.73s remaining: 104ms 973: learn: 0.2216624 total: 3.74s remaining: 99.8ms 974: learn: 0.2216004 total: 3.74s remaining: 96ms 975: learn: 0.2215534 total: 3.75s remaining: 92.1ms 976: learn: 0.2215088 total: 3.75s remaining: 88.3ms 977: learn: 0.2214762 total: 3.75s remaining: 84.4ms 978: learn: 0.2214421 total: 3.76s remaining: 80.6ms 979: learn: 0.2213411 total: 3.76s remaining: 76.7ms 980: learn: 0.2213122 total: 3.76s remaining: 72.9ms 981: learn: 0.2212288 total: 3.77s remaining: 69ms 982: learn: 0.2211355 total: 3.77s remaining: 65.2ms 983: learn: 0.2210839 total: 3.77s remaining: 61.4ms 984: learn: 0.2210519 total: 3.78s remaining: 57.5ms 985: learn: 0.2210075 total: 3.78s remaining: 53.7ms 986: learn: 0.2209731 total: 3.78s remaining: 49.8ms 987: learn: 0.2208895 total: 3.79s remaining: 46ms 988: learn: 0.2208325 total: 3.79s remaining: 42.2ms 989: learn: 0.2207732 total: 3.79s remaining: 38.3ms 990: learn: 0.2207394 total: 3.8s remaining: 34.5ms 991: learn: 0.2206998 total: 3.8s remaining: 30.7ms 992: learn: 0.2206517 total: 3.8s remaining: 26.8ms 993: learn: 0.2206289 total: 3.81s remaining: 23ms 994: learn: 0.2206081 total: 3.81s remaining: 19.1ms 995: learn: 0.2205787 total: 3.81s remaining: 15.3ms 996: learn: 0.2205147 total: 3.82s remaining: 11.5ms 997: learn: 0.2204967 total: 3.83s remaining: 7.67ms 998: learn: 0.2204459 total: 3.83s remaining: 3.83ms 999: learn: 0.2203435 total: 3.83s remaining: 
0us
# Collect the per-model evaluation records into a DataFrame for side-by-side
# comparison (model name, fitted model, scores, timing, predictions).
results = pd.DataFrame(data=results)
results
model name | model | train score | r2 score | time(s) | pred | |
---|---|---|---|---|---|---|
0 | linear regression | LinearRegression(copy_X=True, fit_intercept=Tr... | 0.500388 | 0.480724 | 0.006996 | [2.434628639479911, 2.590508036061692, 2.68828... |
1 | random forest | (DecisionTreeRegressor(ccp_alpha=0.0, criterio... | 0.992913 | 0.937696 | 3.094273 | [3.2383521559984505, 2.9828822541320825, 2.794... |
2 | xgboost | XGBRegressor(base_score=0.5, booster='gbtree',... | 0.935225 | 0.929006 | 0.719587 | [3.2984083, 3.059027, 2.460107, 2.0270689, 1.2... |
3 | catboost | <catboost.core.CatBoostRegressor object at 0x0... | 0.976178 | 0.946864 | 4.511344 | [3.369075010771735, 3.1474646549164893, 2.8587... |
4 | light gradient boost | LGBMRegressor(boosting_type='gbdt', class_weig... | 0.967614 | 0.946496 | 0.137940 | [3.2693240619329615, 3.033266148470302, 2.6433... |
#plot ypred & ytrue
# Overlay actual vs. predicted (log-scale) counts on the first 500 test rows,
# one subplot per fitted model.
# FIX: the subplot count was hardcoded to 4 while `results` holds 5 models,
# so the last model was never plotted -- size the grid from len(results).
fig, axs = plt.subplots(len(results), 1, figsize=(20, 20))
actual = y_test.iloc[:500]  # hoist the invariant slice out of the loop
for i, ax in enumerate(axs):
    m = results.model[i]
    ax.plot(range(len(actual)), actual, label="actual", color='r')
    ax.plot(range(len(actual)), m.predict(x_test.iloc[:500]), label="predicted", color='b')
    ax.set_title(results['model name'][i])
    ax.legend()
# Feature names to pass to pycaret: every engineered column except the three
# log-transformed targets (count = casual + registered, so casual_log and
# registered_log would leak the target).
features =(df.drop(columns=['casual_log','registered_log','count_log']).columns).tolist()
import pycaret
# Wildcard import is the documented pycaret usage pattern (setup, compare_models, ...).
from pycaret.regression import *
# Initialize the pycaret regression experiment on count_log; all engineered
# columns are declared numeric so pycaret does not re-infer them as categorical.
# session_id pins the random seed for reproducibility.
reg= setup(df.drop(columns=['casual_log','registered_log']),target ='count_log',numeric_features=features, session_id=786)
Description | Value | |
---|---|---|
0 | session_id | 786 |
1 | Target | count_log |
2 | Original Data | (10886, 16) |
3 | Missing Values | False |
4 | Numeric Features | 15 |
5 | Categorical Features | 0 |
6 | Ordinal Features | False |
7 | High Cardinality Features | False |
8 | High Cardinality Method | None |
9 | Transformed Train Set | (7620, 15) |
10 | Transformed Test Set | (3266, 15) |
11 | Shuffle Train-Test | True |
12 | Stratify Train-Test | False |
13 | Fold Generator | KFold |
14 | Fold Number | 10 |
15 | CPU Jobs | -1 |
16 | Use GPU | False |
17 | Log Experiment | False |
18 | Experiment Name | reg-default-name |
19 | USI | 3640 |
20 | Imputation Type | simple |
21 | Iterative Imputation Iteration | None |
22 | Numeric Imputer | mean |
23 | Iterative Imputation Numeric Model | None |
24 | Categorical Imputer | constant |
25 | Iterative Imputation Categorical Model | None |
26 | Unknown Categoricals Handling | least_frequent |
27 | Normalize | False |
28 | Normalize Method | None |
29 | Transformation | False |
30 | Transformation Method | None |
31 | PCA | False |
32 | PCA Method | None |
33 | PCA Components | None |
34 | Ignore Low Variance | False |
35 | Combine Rare Levels | False |
36 | Rare Level Threshold | None |
37 | Numeric Binning | False |
38 | Remove Outliers | False |
39 | Outliers Threshold | None |
40 | Remove Multicollinearity | False |
41 | Multicollinearity Threshold | None |
42 | Clustering | False |
43 | Clustering Iteration | None |
44 | Polynomial Features | False |
45 | Polynomial Degree | None |
46 | Trignometry Features | False |
47 | Polynomial Threshold | None |
48 | Group Features | False |
49 | Feature Selection | False |
50 | Features Selection Threshold | None |
51 | Feature Interaction | False |
52 | Feature Ratio | False |
53 | Interaction Threshold | None |
54 | Transform Target | False |
55 | Transform Target Method | box-cox |
compare_models()
Model | MAE | MSE | RMSE | R2 | RMSLE | MAPE | TT (Sec) | |
---|---|---|---|---|---|---|---|---|
catboost | CatBoost Regressor | 0.2024 | 0.0866 | 0.294 | 0.9569 | 0.08 | 0.0682 | 5.336 |
lightgbm | Light Gradient Boosting Machine | 0.2151 | 0.0952 | 0.3083 | 0.9526 | 0.082 | 0.0719 | 0.081 |
rf | Random Forest Regressor | 0.226 | 0.1098 | 0.3307 | 0.9455 | 0.0876 | 0.0759 | 1.493 |
et | Extra Trees Regressor | 0.2258 | 0.1102 | 0.3316 | 0.9452 | 0.0889 | 0.076 | 1.14 |
gbr | Gradient Boosting Regressor | 0.2849 | 0.1481 | 0.3847 | 0.9263 | 0.0957 | 0.0905 | 0.295 |
dt | Decision Tree Regressor | 0.31 | 0.2115 | 0.4592 | 0.8949 | 0.1195 | 0.1008 | 0.03 |
ada | AdaBoost Regressor | 0.5312 | 0.4324 | 0.6573 | 0.7848 | 0.1473 | 0.1549 | 0.236 |
knn | K Neighbors Regressor | 0.5592 | 0.5726 | 0.756 | 0.7153 | 0.1785 | 0.1807 | 0.098 |
ridge | Ridge Regression | 0.7949 | 1.016 | 1.008 | 0.4948 | 0.2291 | 0.2604 | 0.311 |
br | Bayesian Ridge | 0.7949 | 1.016 | 1.008 | 0.4948 | 0.2291 | 0.2605 | 0.057 |
lr | Linear Regression | 0.7949 | 1.016 | 1.008 | 0.4947 | 0.2291 | 0.2604 | 0.823 |
huber | Huber Regressor | 0.8088 | 1.075 | 1.036 | 0.4657 | 0.2373 | 0.2723 | 0.14 |
en | Elastic Net | 0.8372 | 1.114 | 1.055 | 0.4465 | 0.241 | 0.2807 | 0.021 |
lasso | Lasso Regression | 0.8518 | 1.151 | 1.073 | 0.4278 | 0.2464 | 0.2899 | 0.017 |
omp | Orthogonal Matching Pursuit | 0.9647 | 1.364 | 1.168 | 0.3222 | 0.2576 | 0.3137 | 0.015 |
par | Passive Aggressive Regressor | 1.413 | 3.124 | 1.661 | -0.5247 | 0.3753 | 0.4002 | 0.024 |
<catboost.core.CatBoostRegressor at 0x236ea4fba48>
dt=create_model(estimator='rf')
MAE | MSE | RMSE | R2 | RMSLE | MAPE | |
---|---|---|---|---|---|---|
0 | 0.2187 | 0.1018 | 0.319 | 0.9471 | 0.084 | 0.0712 |
1 | 0.2246 | 0.1037 | 0.3221 | 0.9469 | 0.0817 | 0.0708 |
2 | 0.2377 | 0.118 | 0.3435 | 0.9447 | 0.0916 | 0.0806 |
3 | 0.2457 | 0.1299 | 0.3604 | 0.9384 | 0.0977 | 0.0873 |
4 | 0.2438 | 0.1297 | 0.3602 | 0.9384 | 0.0986 | 0.0851 |
5 | 0.2131 | 0.0923 | 0.3038 | 0.9531 | 0.0808 | 0.0702 |
6 | 0.211 | 0.1019 | 0.3192 | 0.9474 | 0.0829 | 0.0698 |
7 | 0.218 | 0.1042 | 0.3228 | 0.9445 | 0.0823 | 0.0688 |
8 | 0.24 | 0.124 | 0.3522 | 0.9384 | 0.0931 | 0.0831 |
9 | 0.2069 | 0.0921 | 0.3035 | 0.9563 | 0.0837 | 0.0719 |
Mean | 0.226 | 0.1098 | 0.3307 | 0.9455 | 0.0876 | 0.0759 |
SD | 0.0138 | 0.0137 | 0.0206 | 0.0058 | 0.0065 | 0.0069 |
# Draw pycaret's learning-curve plot for the random-forest model fitted above.
plot_model(estimator = dt, plot = 'learning')
# Peek at the raw Kaggle test set before re-applying the feature engineering.
bike_test_data.head()
datetime | season | holiday | workingday | weather | temp | atemp | humidity | windspeed | |
---|---|---|---|---|---|---|---|---|---|
0 | 2011-01-20 00:00:00 | 1 | 0 | 1 | 1 | 10.66 | 11.365 | 56 | 26.0027 |
1 | 2011-01-20 01:00:00 | 1 | 0 | 1 | 1 | 10.66 | 13.635 | 56 | 0.0000 |
2 | 2011-01-20 02:00:00 | 1 | 0 | 1 | 1 | 10.66 | 13.635 | 56 | 0.0000 |
3 | 2011-01-20 03:00:00 | 1 | 0 | 1 | 1 | 10.66 | 12.880 | 56 | 11.0014 |
4 | 2011-01-20 04:00:00 | 1 | 0 | 1 | 1 | 10.66 | 12.880 | 56 | 11.0014 |
# Re-apply the training feature engineering to the Kaggle test set so the
# fitted models can score it.
df1 = bike_test_data.copy()
df1['datetime'] = pd.to_datetime(df1['datetime'], format='%Y-%m-%d %H:%M:%S')
#add new columns from the datetime column
# BUG FIX: this feature was computed as np.log(1 + df1['weather']) even though
# it is named windspeed_log and the raw windspeed column is dropped below --
# compute it from windspeed instead. (TODO confirm the training pipeline used
# windspeed here, as the feature name implies.)
df1['windspeed_log'] = np.log(1 + df1['windspeed'])
# Expand datetime into calendar/time components.
df1['year'] = df1['datetime'].dt.year
df1['month'] = df1['datetime'].dt.month
df1['day'] = df1['datetime'].dt.day
df1['hour'] = df1['datetime'].dt.hour
df1['dayofweek'] = df1['datetime'].dt.dayofweek
df1.drop(['datetime'], axis=1, inplace=True)
# Saturday/Sunday flag (pandas dayofweek: Monday=0 ... Sunday=6).
df1['weekend'] = df1['dayofweek'].apply(lambda x: 1 if x >= 5 else 0)
# Drop columns the models were not trained on (raw windspeed is replaced by
# windspeed_log above).
df1.drop(columns=['atemp', 'dayofweek', 'holiday', "windspeed"], inplace=True)
# One-hot encode categoricals with the helper defined earlier in the file
# (its misspelled name is preserved to match the existing definition).
categorical_columns = ["season", "weather"]
df_cat = one_hot_encode_cagtegorical_col(df1, categorical_columns)
numerical_col_list = np.setdiff1d(df1.columns.to_list(), categorical_columns).tolist()
df1 = pd.concat([df1[numerical_col_list], df_cat], axis=1)
df1.head()
day | hour | humidity | month | temp | weekend | windspeed_log | workingday | year | season_2 | season_3 | season_4 | weather_2 | weather_3 | weather_4 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 20 | 0 | 56 | 1 | 10.66 | 0 | 0.693147 | 1 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
1 | 20 | 1 | 56 | 1 | 10.66 | 0 | 0.693147 | 1 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
2 | 20 | 2 | 56 | 1 | 10.66 | 0 | 0.693147 | 1 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
3 | 20 | 3 | 56 | 1 | 10.66 | 0 | 0.693147 | 1 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
4 | 20 | 4 | 56 | 1 | 10.66 | 0 | 0.693147 | 1 | 2011 | 0 | 0 | 0 | 0 | 0 | 0 |
#from random forest
# Score the engineered test set with the pycaret random-forest model;
# predictions are on the log scale (the target was count_log).
dt.predict(df1)
array([2.55298605, 1.51967627, 1.63526349, ..., 5.01076575, 4.74462633, 4.36936797])
#from light gradient boosting
# Pull the fitted LightGBM estimator out of the results table by name,
# then score the engineered test set (log-scale predictions).
is_lgbm = results['model name'] == 'light gradient boost'
x = results.loc[is_lgbm, 'model'].iloc[0]
x.predict(df1)
array([2.1614543 , 1.44661374, 0.98799457, ..., 5.11339754, 4.85435859, 4.5133741 ])
# Compare Random Forest vs. LightGBM predictions on the first 100 test rows.
# BUG FIX: plt.Figure(...) only constructs a detached Figure object, so the
# figsize was silently ignored and plotting went to the default figure;
# plt.figure(...) creates AND activates the figure.
plt.figure(figsize=(20, 10))
subset = df1.iloc[:100]  # hoist the invariant slice
plt.plot(range(len(subset)), dt.predict(subset), label="RF")
plt.plot(range(len(subset)), x.predict(subset), label="Lgbm", color='r')
p = plt.title('Random Forest Vs Light GBM prediction')
p = plt.legend()