
Strucai's Data Exploration: Two Sigma Portfolio Time Series

What is the data about?

This notebook explores the financial data from the Two Sigma Financial Modeling competition on Kaggle. The task is to understand a portfolio of assets through time series that track the trade dynamics. The focus here is on exploring the data.
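
As a quick orientation, the sketch below loads and inspects the competition data (a minimal sketch; the HDF5 path follows the Kaggle kernel layout, whereas the notebook itself uses a local path, and all feature columns apart from 'id', 'timestamp' and the target 'y' are anonymised):

import pandas as pd

# Load the training data (only a training set is provided for this competition).
with pd.HDFStore("../input/train.h5", "r") as train:
    df = train.get("train")

print(df.shape)    # one row per (id, timestamp) observation
print(df.columns)  # 'id', 'timestamp', anonymised feature columns and the target 'y'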

two_sigma_financial_modelling
In [1]:
# Model portfolio returns using time series analysis
__author__ = 'Mizio'

# Not so often used imports
# import csv as csv
# import matplotlib
# matplotlib.use('TkAgg')
# import sys
# sys.path.append('/custom/path/to/modules')
# from sklearn.model_selection import cross_val_score

# Used imports
import numpy as np
import pandas as pd
import pylab as plt
from fancyimpute import MICE
import random
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from scipy.stats import skew
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold, train_test_split
from sklearn.linear_model import LassoCV
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import StandardScaler, LabelBinarizer
from sklearn_pandas import DataFrameMapper
import xgboost as xgb
from matplotlib.backends.backend_pdf import PdfPages
import datetime
from sklearn.cluster import FeatureAgglomeration
import seaborn as sns

class TwoSigmaFinModTools:
    def __init__(self, is_portfolio_predictions=0, number_of_assets_in_portfolio=11):
        self.correlation_coeffecients = []
        self.is_portfolio_predictions = is_portfolio_predictions
        self.df = TwoSigmaFinModTools.df
        self.number_of_assets_in_portfolio = number_of_assets_in_portfolio
        # self.df_test = TwoSigmaFinModTools.df_test
        self.df_all_feature_var_names = []
        self.df_test_all_feature_var_names = []
        self.timestamp = datetime.datetime.now().strftime('%Y%m%d_%Hh%Mm%Ss')
        self.is_with_log1p_call_outcome = 0

    # Private variables
    _non_numerical_feature_names = []
    _numerical_feature_names = []
    _is_one_hot_encoder = 0
    _feature_names_num = []
    _save_path = '/home/mizio/Documents/Kaggle/TwoSigmaFinancialModelling/'
    _is_not_import_data = 0
    is_dataframe_with_target_value = 1

    ''' Pandas Data Frame '''
    # Load the data from the HDF5 file instead of csv. The file consists only of training data.
    # For upload.
    # with pd.HDFStore("../input/train.h5", "r") as train:
    # For local run.
    # Training data
    with pd.HDFStore("/home/mizio/Documents/Kaggle/TwoSigmaFinancialModelling/input/train.h5", "r") as train:
        df = train.get("train")

    # Only training data was provided
    # df_test = []

    @staticmethod
    def square_feet_to_meters(area):
        square_meter_per_square_feet = 0.3048**2
        return area*square_meter_per_square_feet
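    # Note: 1 ft = 0.3048 m, so 1 ft^2 = 0.3048^2 m^2 ≈ 0.0929 m^2. This helper appears to be carried
    # over from an earlier project and is not called anywhere in this notebook.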

    @staticmethod
    def extract_numerical_features(df):
        df = df.copy()
        numerical_feature_names = df.columns[np.where(TwoSigmaFinModTools.numerical_feature_logical_incl_hidden_num(
            df) == 0)]
        return numerical_feature_names

    @staticmethod
    def extract_non_numerical_features(df):
        df = df.copy()
        non_numerical_feature_names = df.columns[np.where(TwoSigmaFinModTools.numerical_feature_logical_incl_hidden_num(
            df))]
        return non_numerical_feature_names

    @staticmethod
    def numerical_feature_logical_incl_hidden_num(df):
        # Flags non-numeric (string) columns: a string value can be concatenated with its own str()
        # representation, while a numeric value raises a TypeError.
        logical_of_non_numeric_features = np.zeros(df.columns.shape[0], dtype=int)
        for ite in np.arange(0, df.columns.shape[0]):
            try:
                str(df[df.columns[ite]][0]) + df[df.columns[ite]][0]
                logical_of_non_numeric_features[ite] = True
            except TypeError:
                pass
        return logical_of_non_numeric_features

    def clean_data(self, df, is_with_MICE=0):
        df = df.copy()
        if df.isnull().sum().sum() > 0:
            if is_with_MICE:
                # Imputation using MICE
                numerical_features_names = self.extract_numerical_features(df)
                df.loc[:, tuple(numerical_features_names)] = self.estimate_by_mice(df[numerical_features_names])
            else:
                if any(tuple(df.columns == 'y')):
                    df = df.dropna()
                else:
                    df = df.dropna(axis=1)
                    TwoSigmaFinModTools._feature_names_num = pd.Series(data=np.intersect1d(
                        TwoSigmaFinModTools._feature_names_num.values, df.columns), dtype=object)
        TwoSigmaFinModTools._numerical_feature_names = TwoSigmaFinModTools.extract_numerical_features(df)
        return df

    @staticmethod
    def encode_labels_in_numeric_format(df, estimated_var):
        # Transform non-numeric labels into numerical values.
        # Con: this imposes an artificial ordering on the data, since some labels get high values and
        # others low ones even though no such ordering exists between the labels.
        # Alternative: one-hot encoding, which gives every label its own column of binary values.
        feature_name_num = ''.join([estimated_var, 'Num'])
        mask = ~df[estimated_var].isnull()
        df[feature_name_num] = df[estimated_var]
        df.loc[mask, tuple([feature_name_num])] = np.reshape(df[estimated_var].factorize()[0][mask[mask == 1].index],
                                                             (df.shape[0], 1))
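    # Illustration (hypothetical labels): a column with values ['BUY', 'SELL', 'BUY', 'HOLD'] is
    # factorized to [0, 1, 0, 2] and stored in '<feature>Num'; one_hot_encoder() below instead spreads
    # it over binary columns '<feature>_BUY', '<feature>_SELL', '<feature>_HOLD', avoiding the
    # artificial ordering.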

    @staticmethod
    def label_classes(df, estimated_var):
        le = LabelEncoder()
        le.fit(df[estimated_var].values)
        return le.classes_

    @staticmethod
    def one_hot_encoder(df, estimated_var):
        df_class = df.copy()
        ohe = OneHotEncoder()
        label_classes = df_class[estimated_var].factorize()[1]
        new_one_hot_encoded_features = [''.join([estimated_var, '_', x]) for x in label_classes]
        mask = ~df[estimated_var].isnull()
        feature_var_values = ohe.fit_transform(np.reshape(np.array(df[''.join([estimated_var, 'Num'])][mask].values),
                                                          (df[mask].shape[0], 1))).toarray().astype(int)
        # Create new feature_var columns with one-hot encoded values
        for ite in new_one_hot_encoded_features:
            df[ite] = df[estimated_var]
        df.loc[mask, tuple(new_one_hot_encoded_features)] = feature_var_values

    @staticmethod
    def add_feature_var_name_with_zeros(df, feature_var_name):
        df[feature_var_name] = np.zeros((df.shape[0], 1), dtype=int)

    @staticmethod
    def feature_var_names_in_training_set_not_in_test_set(feature_var_names_training, feature_var_names_test):
        feature_var_name_addition_list = []
        for feature_var_name in feature_var_names_training:
            if not any(tuple(feature_var_name == feature_var_names_test)):
                feature_var_name_addition_list.append(feature_var_name)
        return np.array(feature_var_name_addition_list)

    def feature_mapping_to_numerical_values(self, df):
        TwoSigmaFinModTools._is_one_hot_encoder = 0
        mask = ~df.isnull()
        # Assume that the training set has all possible feature_var_names. A test set could in principle
        # contain feature_var_names never seen in training, but these are of no use since they cannot be
        # part of the trained learning algorithm.
        # Feature_var_names of the training set that do not occur in the test set are added to the test
        # set as all-zero columns.
        if not any(tuple(df.columns == 'y')):
            # All one-hot encoded feature var names occurring in the test data are assigned to the public
            # variable df_test_all_feature_var_names.
            self.df_test_all_feature_var_names = df.columns

        _feature_names_num = np.zeros((TwoSigmaFinModTools._non_numerical_feature_names.shape[0],), dtype=object)
        ith = 0
        for feature_name in TwoSigmaFinModTools._non_numerical_feature_names:
            # Create a feature_nameNum list
            feature_name_num = ''.join([feature_name, 'Num'])
            _feature_names_num[ith] = feature_name_num
            ith += 1
            TwoSigmaFinModTools.encode_labels_in_numeric_format(df, feature_name)

            if TwoSigmaFinModTools._is_one_hot_encoder:
                is_with_label_binarizer = 0
                if is_with_label_binarizer:
                    mapper_df = DataFrameMapper([(feature_name, LabelBinarizer())], df_out=True)
                    feature_var_values = mapper_df.fit_transform(df.copy())
                    print(df[feature_name].isnull().sum().sum())
                    print(df[feature_name][mask[feature_name]].isnull().sum().sum())
                    for ite in feature_var_values.columns:
                        df[ite] = feature_var_values[ite]
                else:
                    TwoSigmaFinModTools.one_hot_encoder(df, feature_name)
        TwoSigmaFinModTools._feature_names_num = pd.Series(data=_feature_names_num, dtype=object)

    @staticmethod
    def feature_agglomeration(df, number_of_clusters=None):
        df = df.copy()
        # Todo: find optimal number of clusters for the feature clustering
        # number_of_clusters = int(df.shape[1]/2)
        if number_of_clusters is None:
            number_of_clusters = int(df.shape[1] / 1.2)

        agglomerated_features = FeatureAgglomeration(n_clusters=number_of_clusters)
        if any(tuple(df.columns == 'Call Outcome')):
            res = agglomerated_features.fit_transform(np.reshape(np.array(df.dropna().values), df.dropna()
                                                                 .shape), y=df['Call Outcome'].values)
        else:
            res = agglomerated_features.fit_transform(np.reshape(np.array(df.values), df.shape))
        df = pd.DataFrame(data=res)
        return df

    @staticmethod
    def dendrogram(df, number_of_clusters=None):
        # Create dendrogram
        if number_of_clusters is None:
            number_of_clusters = int(df.shape[1] / 1.2)
        agglomerated_features = FeatureAgglomeration(n_clusters=number_of_clusters)
        used_networks = np.arange(0, number_of_clusters, dtype=int)

        # Create a custom palette to identify the networks
        network_pal = sns.cubehelix_palette(len(used_networks),
                                            light=.9, dark=.1, reverse=True,
                                            start=1, rot=-2)
        network_lut = dict(zip(map(str, df.columns), network_pal))

        # Convert the palette to vectors that will be drawn on the side of the matrix
        networks = df.columns.get_level_values(None)
        network_colors = pd.Series(networks, index=df.columns).map(network_lut)
        sns.set(font="monospace")
        # Create custom colormap
        cmap = sns.diverging_palette(h_neg=210, h_pos=350, s=90, l=30, as_cmap=True)
        cg = sns.clustermap(df.astype(float).corr(), cmap=cmap, linewidths=.5, row_colors=network_colors,
                            col_colors=network_colors)
        plt.setp(cg.ax_heatmap.yaxis.get_majorticklabels(), rotation=0)
        plt.setp(cg.ax_heatmap.xaxis.get_majorticklabels(), rotation=90)
        plt.show()


    def feature_engineering(self, df):
        is_skewness_correction_for_all_features = 1
        if is_skewness_correction_for_all_features:
            # Correcting for skewness
            # Treat all numerical variables that were not one-hot encoded
            if any(tuple(df.columns == 'y')):
                self.is_with_log1p_call_outcome = 1

            numerical_feature_names_of_non_modified_df = TwoSigmaFinModTools._numerical_feature_names

            if TwoSigmaFinModTools._is_one_hot_encoder:
                numerical_feature_names_of_non_modified_df = numerical_feature_names_of_non_modified_df.values
            else:
                numerical_feature_names_of_non_modified_df = np.concatenate(
                    [TwoSigmaFinModTools._feature_names_num.values, numerical_feature_names_of_non_modified_df.values])

            relevant_features = df[numerical_feature_names_of_non_modified_df].columns[
                (df[numerical_feature_names_of_non_modified_df].columns != 'Id')]
            self.skew_correction(df, relevant_features)
        else:
            # Only scale down Call Outcome and leave all other numerical features standardized.
            if any(tuple(df.columns == 'Call Outcome')):
                self.is_with_log1p_call_outcome = 1
                df.loc[:, tuple(['Call Outcome'])] = np.log1p(df['Call Outcome'])

    @staticmethod
    def skew_correction(df, numerical_features):
        # Skew correction
        skewed_feats = df[numerical_features].apply(lambda x: skew(x.dropna()))  # compute skewness
        skewed_feats = skewed_feats[skewed_feats > 0.75]
        skewed_feats = skewed_feats.index
        df.loc[:, tuple(skewed_feats)] = np.log1p(np.asarray(df[skewed_feats], dtype=float))
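    # Illustration: np.log1p([0, 1, 10, 100]) ≈ [0.00, 0.69, 2.40, 4.62]; features whose skewness exceeds
    # 0.75 get this transform, which compresses long right tails towards a more symmetric distribution.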

    def drop_variable_before_preparation(self, df, limit_of_nans):
        # Acceptable limit of NaN in features
        for feature in self.features_with_missing_values_in_dataframe(df).index:
            if df[feature].isnull().sum() > limit_of_nans*df.shape[0]:
                df = df.drop([feature], axis=1)
        return df

    def drop_variable(self, df):
        # df = df.drop(['Id'], axis=1)

        if not any(tuple(df.columns == 'y')):
            # All feature var names occurring in the test data are assigned to the public variable df_test_all_feature_var_names.
            self.df_test_all_feature_var_names = df.columns
        return df

    def save_dataframe(self, df):
        with pd.HDFStore(''.join([TwoSigmaFinModTools._save_path, 'train_debug', self.timestamp, '.h5']), "w") as train:
            train.put("train_debug", df)

    @staticmethod
    def load_dataframe():
        dataframe_name = 'train_debug'

        # one-hot encoded
        # not one-hot
        # date_time = '20170613_19h09m40s'
        # date_time = '20170613_19h34m31s'
        # date_time = '20170614_00h07m32s'
        date_time = '20170619_11h47m22s'
        with pd.HDFStore(''.join([TwoSigmaFinModTools._save_path, dataframe_name, date_time, '.h5']), 'r') as train:
            return train.get(dataframe_name)

    @staticmethod
    def drop_num_features(df):
        # Drop all categorical feature helping columns ('Num')
        for feature_name in TwoSigmaFinModTools._feature_names_num:
            df = df.drop([feature_name], axis=1)
        return df

    @staticmethod
    def transform_data_to_portfolio(df):
        # The mean over the portfolio at a given timestamp corresponds to the portfolio return.
        # An easier way to model the mean is to fit the cumulative mean instead.
        df_transformed = df.groupby('timestamp').agg([np.mean]).reset_index()
        df_transformed.columns = df_transformed.columns.get_level_values(0)
        return df_transformed
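    # Illustration: if at timestamp t the portfolio holds assets with returns y = [0.01, -0.02, 0.04],
    # the grouped mean 0.01 is the portfolio return at t; the cumulative sum of these means (plotted in
    # main() below) traces the portfolio value over time.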

    def portfolio_timestamp_period_with_most_highly_corr_assets(self, df):
        # A first approximation to model portfolio returns:
        # i) Find assets that correlate with y, where the correlation is higher than a threshold value
        # ii) Include only above assets and find maximum timestamp period with most assets
        # iii) Transform target value y to be cumulative mean of y in order to obtain monotonic behaviour
        # iv) Train model to predict transformed target value with the selected most correlated assets in selected
        # timestamp interval
        # v) Run model on test data and apply inverse transform to get target value y.

        # From the plots it looks like a lot of assets are bought at the first timestamp and sold at the last.
        # We should of course primarily select assets based on how strongly they are correlated with y.

        correlation_coeffecients = self.correlation_coeffecients
        names_of_assets = correlation_coeffecients.loc[correlation_coeffecients.index != 'y'].sort_values(
            ascending=False).head(self.number_of_assets_in_portfolio).index
        # Todo: check whether any intermediate-sale assets are among those most correlated with y
        return df.loc[:, names_of_assets]

    def prepare_data(self, df):
        df = df.copy()

        TwoSigmaFinModTools._is_not_import_data = 1
        if TwoSigmaFinModTools._is_not_import_data:
            if self.is_portfolio_predictions:
                df = TwoSigmaFinModTools.transform_data_to_portfolio(df)
                df = self.portfolio_timestamp_period_with_most_highly_corr_assets(df)

            df = self.drop_variable_before_preparation(df, limit_of_nans=1.0)

            TwoSigmaFinModTools._non_numerical_feature_names = TwoSigmaFinModTools.extract_non_numerical_features(df)
            TwoSigmaFinModTools._numerical_feature_names = TwoSigmaFinModTools.extract_numerical_features(df)

            self.feature_mapping_to_numerical_values(df)
            if TwoSigmaFinModTools._is_one_hot_encoder:
                df = TwoSigmaFinModTools.drop_num_features(df)
            self.feature_engineering(df)
            df = self.clean_data(df, is_with_MICE=1)
            df = self.feature_scaling(df)

            is_save_dataframe = 1
            if is_save_dataframe:
                self.save_dataframe(df)
                TwoSigmaFinModTools.is_dataframe_with_target_value = 0
        else:
            df = TwoSigmaFinModTools.load_dataframe()
            TwoSigmaFinModTools._non_numerical_feature_names = TwoSigmaFinModTools.extract_non_numerical_features(df)
            TwoSigmaFinModTools._numerical_feature_names = TwoSigmaFinModTools.extract_numerical_features(df)
            TwoSigmaFinModTools.is_dataframe_with_target_value = 0

        df = self.drop_variable(df)
        return df

    @staticmethod
    def features_with_null_logical(df, axis=1):
        row_length = df.shape[0]
        # Axis to count non null values in. aggregate_axis=0 implies counting for every feature
        aggregate_axis = 1 - axis
        features_non_null_series = df.count(axis=aggregate_axis)
        # Whenever count() differs from row_length it implies a null value exists in feature column and a False in mask
        mask = row_length == features_non_null_series
        return mask

    @staticmethod
    def estimate_by_mice(df):
        df_estimated_var = df.copy()
        random.seed(129)
        mice = MICE()  # model=RandomForestClassifier(n_estimators=100))
        res = mice.complete(np.asarray(df.values, dtype=float))
        df_estimated_var.loc[:, df.columns] = res[:][:]
        return df_estimated_var
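    # MICE (Multiple Imputation by Chained Equations) fills the NaNs by iteratively regressing each
    # incomplete column on the remaining columns until the imputed values stabilise.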

    def feature_scaling(self, df):
        df = df.copy()
        # Standardization (centering and scaling) of dataset that removes mean and scales to unit variance
        standard_scaler = StandardScaler()
        numerical_feature_names_of_non_modified_df = TwoSigmaFinModTools._numerical_feature_names
        if any(tuple(df.columns == 'y')):
            if not TwoSigmaFinModTools._is_one_hot_encoder:
                numerical_feature_names_of_non_modified_df = np.concatenate(
                    [TwoSigmaFinModTools._feature_names_num.values, numerical_feature_names_of_non_modified_df.values])
            # Include scaling of y
            y = df['y'].values
            relevant_features = df[numerical_feature_names_of_non_modified_df].columns[
                (df[numerical_feature_names_of_non_modified_df].columns != 'y')
                & (df[numerical_feature_names_of_non_modified_df].columns != 'id')]
            mask = ~df[relevant_features].isnull()
            res = standard_scaler.fit_transform(X=df[relevant_features][mask].values, y=y)
            if (~mask).sum().sum() > 0:
                df = self.standardize_relevant_features(df, relevant_features, res)
            else:
                df.loc[:, tuple(relevant_features)] = res
        else:
            if not TwoSigmaFinModTools._is_one_hot_encoder:
                numerical_feature_names_of_non_modified_df = np.concatenate(
                    [TwoSigmaFinModTools._feature_names_num.values, numerical_feature_names_of_non_modified_df.values])
            relevant_features = df[numerical_feature_names_of_non_modified_df].columns[
                (df[numerical_feature_names_of_non_modified_df].columns != 'id')]
            mask = ~df[relevant_features].isnull()
            res = standard_scaler.fit_transform(df[relevant_features][mask].values)
            if mask.sum().sum() > 0:
                df = self.standardize_relevant_features(df, relevant_features, res)
            else:
                df.loc[:, tuple(relevant_features)] = res
        return df

    @staticmethod
    def standardize_relevant_features(df, relevant_features, res):
        i_column = 0
        for feature in relevant_features:
            mask = ~df[feature].isnull()
            mask_index = mask[mask == 1].index
            df.loc[mask_index, tuple([feature])] = res[:, i_column, None]
            i_column += 1
        return df

    def missing_values_in_dataframe(self, df):
        mask = self.features_with_null_logical(df)
        print(df[mask[mask == 0].index.values].isnull().sum())
        print('\n')

    def features_with_missing_values_in_dataframe(self, df):
        df = df.copy()
        mask = self.features_with_null_logical(df)
        return df[mask[mask == 0].index.values].isnull().sum()

    @staticmethod
    def rmse_cv(model, x_train, y_train):
        rmse = np.sqrt(-cross_val_score(model, x_train, y_train, scoring='neg_mean_squared_error', cv=5))
        return rmse

    @staticmethod
    def rmse(y_pred, y_actual):
        n_samples = np.shape(y_pred)[0]
        squared_residuals_summed = 0.5*sum((y_pred - y_actual)**2)
        return np.sqrt(2.0*squared_residuals_summed/n_samples)
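    # Equivalent to sqrt(sum((y_pred - y_actual)**2) / n_samples); the factors 0.5 and 2.0 cancel out.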

    def assets_with_intermediate_sales(self, df, is_with_intermediate_sale):
        df_grouped_by_id = df[['id', 'timestamp', 'y']].groupby('id').agg([np.min, np.max, len]).reset_index()
        df_grouped_by_id.sort_values([('timestamp', 'amax')], inplace=True, ascending=False)
        # Identification of intermediate sale positions.
        # What are the length differences?
        # 1) Make a cutting strategy that checks on amax until the intermediate amax is found.
        # 2) After the first cut, check on the left cut part whether amax - amin equals len - 1.
        # 3) If True, continue by making an additional cut on the right-hand part,
        # 3i) then check the left part of the new cut to see if amax - amin equals len - 1. If True iterate
        # from 3), if False iterate from 4).
        # 4) If False, continue with a new cut on the same left part,
        # 4i) then check the left part of the new cut to see if amax - amin equals len - 1. If True iterate
        # from 3), if False iterate from 4).

        # id of assets with intermediate trades
        is_with_intermediate_sale = is_with_intermediate_sale.drop(['index'], axis=1)
        indices_with_intermediate_trades = np.where(is_with_intermediate_sale)[0]
        id_for_intermediate_trades = df_grouped_by_id.reset_index().loc[indices_with_intermediate_trades,].id.values

        # Timestamp length diffs with len for assets with intermediate sale
        timestamp_length_and_len_diffs = (df_grouped_by_id[('timestamp', 'amax')]
                                          - df_grouped_by_id[('timestamp', 'amin')]
                                          - (df_grouped_by_id[('timestamp', 'len')] - 1)).reset_index().loc[
            indices_with_intermediate_trades]
        print('\n')
        print('timestamp length and len diffs:', '\n')
        print(timestamp_length_and_len_diffs)

        # Cut in two and check whether amax - amin equals len - 1 for the cut part
        # Assuming only one intermediate sale exists
        intermediate_trade_timestamp_of_assets = np.zeros((len(timestamp_length_and_len_diffs), 2))
        for ite in np.arange(0, len(id_for_intermediate_trades)):
            id = id_for_intermediate_trades[ite]
            amin = df_grouped_by_id[df_grouped_by_id.id == id][('timestamp', 'amin')]
            amax = df_grouped_by_id[df_grouped_by_id.id == id][('timestamp', 'amax')]
            # Todo: More general case: What if there are several intermediate trades?
            intermediate_trade_timestamp_of_assets[ite] = self.recursive_left_right_check(df, amin, amax, id)

        intermediate_trades_dataframe = pd.DataFrame()
        intermediate_trades_dataframe['id'] = id_for_intermediate_trades
        intermediate_trades_dataframe['amin'] = intermediate_trade_timestamp_of_assets[0:, 0]
        intermediate_trades_dataframe['amax'] = intermediate_trade_timestamp_of_assets[0:, 1]
        return intermediate_trades_dataframe
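    # Worked example (hypothetical id): timestamps [0..10] and [20..30] give amin=0, amax=30 and len=22,
    # so amax - amin = 30 differs from len - 1 = 21 and the asset is flagged; recursive_left_right_check()
    # below then bisects the interval to narrow in on the timestamp range that brackets the gap where the
    # asset was sold and later bought back.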

    def recursive_left_right_check(self, df, amin, amax, id):
        '''
        Method structure:
        1) Compute the left part.
        2a) If the left part is contiguous (no timestamp gap), compute the right part:
        2a,i) return amax and amin of the right part, or
        2a,ii) divide the right part in two and compute the left part again, starting from 1).
        2b) Else, if the left part is not contiguous, return amax and amin of the left part, or
        2b,i) divide the left part in two and compute the left part again, starting from 1).
        :param df:
        :param amin:
        :param amax:
        :param id:
        :return:
        '''
        asset_timestamps = df[['timestamp', 'id']][(df.id == id) & (df.timestamp >= amin.values[0])
                                                   & (df.timestamp <= amax.values[0])].groupby('timestamp').timestamp
        # Find midway timestamp of particular id
        midway_timestamp = asset_timestamps.apply(int).values[round(len(asset_timestamps.apply(int).values)/2)]

        is_timestamp_diff_equal_len_left, amin_left, amax_left, lenght_left = self.check_timestamps_left_part(
            df, midway_timestamp, amin, id)
        if is_timestamp_diff_equal_len_left.values[0]:
            is_timestamp_diff_equal_len_right, amin_right, amax_right, lenght_right = self.check_timestamps_right_part(
                df, midway_timestamp, amax, id)
            if lenght_right.values[0]:
                return amin_right, amax_right
            else:
                if lenght_left.values[0] == 2:
                    return amin_left, amax_left
                else:
                    return self.recursive_left_right_check(df, amin_right, amax_right, id)
        else:
            return self.recursive_left_right_check(df, amin_left, amax_left, id)

    def check_timestamps_left_part(self, df, midway_timestamps, amin, id):
        '''
        Check the left part.
        :param df:
        :param midway_timestamps:
        :param amin:
        :param id:
        :return: (is_timestamp_diff_equal_len_left, amin_left, amax_left, lenght_left); the first element is
        True if the left part is contiguous, i.e. the intermediate sale is not in the left part.
        '''
        df = df[df.id == id]
        df_timestamp_interval = df[(df.timestamp >= amin.values[0]) & (df.timestamp <= midway_timestamps)]
        df_timestamp_interval_aggregated = df_timestamp_interval.groupby('id').agg([np.min, np.max, len])
        amin_left = df_timestamp_interval_aggregated[('timestamp', 'amin')]
        amax_left = df_timestamp_interval_aggregated[('timestamp', 'amax')]
        lenght_left = df_timestamp_interval_aggregated[('timestamp', 'len')]
        is_timestamp_diff_equal_len_left = (amax_left - amin_left).values == (lenght_left - 1)
        return is_timestamp_diff_equal_len_left, amin_left, amax_left, lenght_left

    def check_timestamps_right_part(self, df, midway_timestamps, amax, id):
        '''
        Check the right part.
        :param df:
        :param midway_timestamps:
        :param amax:
        :param id:
        :return: (is_timestamp_diff_equal_len_right, amin_right, amax_right, lenght_right); the first element
        is True if the right part is contiguous, i.e. the intermediate sale is not in the right part.
        '''
        df = df[df.id == id]
        df_timestamp_interval = df[(df.timestamp > midway_timestamps) & (df.timestamp <= amax.values[0])]
        df_timestamp_interval_aggregated = df_timestamp_interval.groupby('id').agg([np.min, np.max, len])
        amin_right = df_timestamp_interval_aggregated[('timestamp', 'amin')]
        amax_right = df_timestamp_interval_aggregated[('timestamp', 'amax')]
        lenght_right = df_timestamp_interval_aggregated[('timestamp', 'len')]
        is_timestamp_diff_equal_len_right = (amax_right - amin_right).values == (lenght_right - 1)
        return is_timestamp_diff_equal_len_right, amin_right, amax_right, lenght_right

    def outlier_identification(self, model, x_train, y_train):
        # Split the training data into an extra test set
        x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
        print('\nOutlier shapes')
        print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
        model.fit(x_train_split, y_train_split)
        y_predicted = model.predict(x_test_split)
        residuals = np.absolute(y_predicted - y_test_split)
        rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
        outliers_mask = residuals >= rmse_pred_vs_actual
        outliers_mask = np.concatenate([np.zeros((np.shape(y_train_split)[0],), dtype=bool), outliers_mask])
        not_an_outlier = outliers_mask == 0
        # Resample the training set from split, since the set was randomly split
        x_out = np.insert(x_train_split, np.shape(x_train_split)[0], x_test_split, axis=0)
        y_out = np.insert(y_train_split, np.shape(y_train_split)[0], y_test_split, axis=0)
        return x_out[not_an_outlier, ], y_out[not_an_outlier, ]
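    # Outlier criterion: a point in the held-out split is flagged as an outlier when its absolute residual
    # is at least the RMSE of that split, i.e. it lies further from the prediction than the typical error.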

    def predicted_vs_actual_y_input_model(self, model, x_train_split, x_test_split, y_train_split, y_test_split,
                                          title_name):
        # Split the training data into an extra test set
        # x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
        print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
        model.fit(x_train_split, y_train_split)
        y_predicted = model.predict(x_test_split)
        plt.figure(figsize=(10, 5))
        plt.scatter(y_test_split, y_predicted, s=20)
        rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
        plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
        plt.xlabel('Actual y')
        plt.ylabel('Predicted y')
        plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
        plt.tight_layout()

    def predicted_vs_actual_y_xgb(self, xgb, best_nrounds, xgb_params, x_train_split, x_test_split, y_train_split,
                                  y_test_split, title_name):
        # Split the training data into an extra test set
        # x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(x_train, y_train)
        dtrain_split = xgb.DMatrix(x_train_split, label=y_train_split)
        dtest_split = xgb.DMatrix(x_test_split)
        print(np.shape(x_train_split), np.shape(x_test_split), np.shape(y_train_split), np.shape(y_test_split))
        gbdt = xgb.train(xgb_params, dtrain_split, best_nrounds)
        y_predicted = gbdt.predict(dtest_split)
        plt.figure(figsize=(10, 5))
        plt.scatter(y_test_split, y_predicted, s=20)
        rmse_pred_vs_actual = self.rmse(y_predicted, y_test_split)
        plt.title(''.join([title_name, ', Predicted vs. Actual.', ' rmse = ', str(rmse_pred_vs_actual)]))
        plt.xlabel('Actual y')
        plt.ylabel('Predicted y')
        plt.plot([min(y_test_split), max(y_test_split)], [min(y_test_split), max(y_test_split)])
        plt.tight_layout()

    @staticmethod
    def multipage(filename, figs=None):
        pp = PdfPages(filename)
        if figs is None:
            figs = [plt.figure(n) for n in plt.get_fignums()]
        for fig in figs:
            fig.savefig(pp, format='pdf')
        pp.close()


def main():
    # Not so often used imports
    # import csv as csv
    # import matplotlib
    # matplotlib.use('TkAgg')
    # import sys
    # sys.path.append('/custom/path/to/modules')
    # from sklearn.model_selection import cross_val_score

    # Used imports
    import numpy as np
    import pandas as pd
    import pylab as plt
    import seaborn as sns
    from fancyimpute import MICE
    import random
    from sklearn.preprocessing import LabelEncoder
    from sklearn.preprocessing import OneHotEncoder
    from scipy.stats import skew
    from sklearn.model_selection import cross_val_score
    from sklearn.model_selection import KFold, train_test_split
    from sklearn.linear_model import LassoCV
    from sklearn.ensemble import IsolationForest
    from sklearn.preprocessing import StandardScaler, LabelBinarizer
    from sklearn_pandas import DataFrameMapper
    import xgboost as xgb
    from matplotlib.backends.backend_pdf import PdfPages
    import datetime
    from sklearn.cluster import FeatureAgglomeration
    from sklearn.feature_selection import SelectFromModel
    from sklearn.ensemble import RandomForestRegressor
    pd.set_option('display.max_columns', 120)

    two_sigma_fin_mod_tools = TwoSigmaFinModTools(is_portfolio_predictions=1, number_of_assets_in_portfolio=200)
    df = two_sigma_fin_mod_tools.df.copy()
    # Partitioning. train_test_split() defaults to a 25% test-set size
    # Generate random sequence from 0 to shape[0] of df
    indices_shuffled = np.arange(df.shape[0])
    np.random.shuffle(indices_shuffled)
    length_of_75_percent = round(0.75*df.shape[0])
    indices_75_percent = indices_shuffled[:length_of_75_percent]
    indices_25_percent = indices_shuffled[length_of_75_percent:]
    df_train = two_sigma_fin_mod_tools.df.copy().loc[indices_75_percent, ]
    df_test = two_sigma_fin_mod_tools.df.copy().loc[indices_25_percent, ]
    id_df_test = df_test.id

    is_explore_data = 1
    if is_explore_data:
        # Overview of train data
        print('\n TRAINING DATA:----------------------------------------------- \n')
        # print(df.head(3))
        # print('\n')
        # print(df.info())
        # print('\n')
        # print(df.describe())
        # print('\n')
        # print(df.dtypes)
        # print(df.get_dtype_counts())

        # Histogram of features in the portfolio
        # Each asset is identified by its 'id'.
        print(len(df.id.unique()))  # Shows the number of assets (financial instruments) being tracked.
        print(len(df.timestamp.unique()))  # Shows the number of time periods.
        features = ['timestamp', 'derived_0', 'derived_1', 'derived_2', 'derived_3', 'derived_4']
        # features = ['timestamp', 'derived_1']
        # df[features].groupby('timestamp').agg([np.mean]).reset_index().apply(np.log1p).hist(bins='auto', alpha=0.5)

        df_derived_features = df[features].groupby('timestamp').agg([np.mean, np.std, len]).reset_index()
        print(df_derived_features.head())
        print(df_derived_features.describe())

        # Examine individual assets that are identified by the 'id'
        print('\n Assets:----------------------------------------------- \n')
        df_assets = df.groupby('id')['y'].agg(['mean', 'std', len]).reset_index()
        print(df_assets.head())

        # Plot target value of asset id=0 as function of timestamp
        asset_id = 0
        asset_0 = df[df.id == asset_id]
        # asset_0 = df.loc[df.id == 0, ('timestamp', 'y')].groupby('timestamp')
        plt.figure()
        plt.plot(asset_0.timestamp.values, asset_0.y.values, '.')
        plt.plot(asset_0.timestamp.values, asset_0.y.values.cumsum())
        plt.legend(('asset value', 'cumulative asset value'), loc=1, borderaxespad=0.)
        plt.xlabel('timestamp')
        plt.ylabel('asset value')
        plt.title(''.join(['Asset ', str(asset_id)]))

        # When are the assets bought and sold?
        # How can we be sure that they are not sold in between and held for a shorter time? Checking amin and
        # amax of the timestamp only indicates the first time the asset is bought and the last time it is sold.

        df_grouped_by_id = df[['id', 'timestamp', 'y']].groupby('id').agg([np.min, np.max, len]).reset_index()
        df_grouped_by_id.sort_values([('timestamp', 'amax')], inplace=True, ascending=False)
        print(df_grouped_by_id.head())

        # Plot without check on intermediate sales
        plt.figure()
        plt.plot(df_grouped_by_id[('timestamp', 'amin')], df_grouped_by_id.id, '.', label='bought')
        plt.plot(df_grouped_by_id[('timestamp', 'amax')], df_grouped_by_id.id, '.', color='r', label='sold')
        plt.xlabel('timestamp')
        plt.ylabel('asset id')
        plt.legend()

        # Check on intermediate sales
        # check if len - 1 of timestamps equals amax - amin
        is_with_intermediate_sale = ((df_grouped_by_id[('timestamp', 'amax')] - df_grouped_by_id[('timestamp', 'amin')])
                                     != (df_grouped_by_id[('timestamp', 'len')] - 1)).reset_index()
        print(''.join(['Number of intermediate sold assets: ', str(int(is_with_intermediate_sale.sum()[0]))]))
        print(df_grouped_by_id.reset_index().loc[np.where(is_with_intermediate_sale.drop(['index'], axis=1))[0],])
        intermediate_sales_df = two_sigma_fin_mod_tools.assets_with_intermediate_sales(df, is_with_intermediate_sale)
        print(intermediate_sales_df)

        # Plot only intermediate sales of assets.
        # Notice that for intermediate sales, assets are sold at amin and bought again at amax.
        plt.figure()
        amin_values = intermediate_sales_df.amax.values
        amax_values = intermediate_sales_df.amin.values
        id_array = intermediate_sales_df.id.values
        plt.plot(amin_values, id_array, '.', label='bought')
        plt.plot(amax_values, id_array, '.', color='r', label='sold')
        plt.title('Only intermediate trades')
        plt.xlabel('timestamp')
        plt.ylabel('asset id')
        plt.legend()

        # Plot includes intermediate sales of assets.
        # Notice that for intermediate sales, assets are sold at amin and bought again at amax.
        plt.figure()
        amin_values = np.insert(df_grouped_by_id[('timestamp', 'amin')].values, np.shape(
            df_grouped_by_id[('timestamp', 'amin')].values)[0], intermediate_sales_df.amax.values, axis=0)
        amax_values = np.insert(df_grouped_by_id[('timestamp', 'amax')].values, np.shape(
            df_grouped_by_id[('timestamp', 'amax')].values)[0], intermediate_sales_df.amin.values, axis=0)
        id_array = np.insert(df_grouped_by_id.id.values, np.shape(df_grouped_by_id.id.values)[0],
                             intermediate_sales_df.id.values)
        plt.plot(amin_values, id_array, '.', label='bought')
        plt.plot(amax_values, id_array, '.', color='r', label='sold')
        plt.title('With intermediate trades')
        plt.xlabel('timestamp')
        plt.ylabel('asset id')
        plt.legend()

        # Visualize market run over the time period
        market_return_df = df[['timestamp', 'y']].groupby('timestamp').agg([np.mean, np.std, len]).reset_index()
        # print(market_return_df.head())

        # How does the mean and std of the target value 'y' vary as function of timestamp?
        # How does size of the portfolio vary as function of timestamp?
        timestamp = market_return_df['timestamp']
        y_mean = np.array(market_return_df['y']['mean'])
        y_std = np.array(market_return_df['y']['std'])
        # Number of assets traded for each unique timestamp
        size_of_portfolio = np.array(market_return_df['y']['len'])

        f, axarr = plt.subplots(3, sharex=True)
        axarr[0].plot(timestamp, y_mean, '.')
        axarr[0].set_ylabel('y mean')

        axarr[1].plot(timestamp, y_std, '.')
        axarr[1].set_ylabel('y std')

        axarr[2].plot(timestamp, size_of_portfolio, '.')
        axarr[2].set_ylabel('size of portfolio')

        axarr[2].set_xlabel('timestamp')
        # Comment: we see that timestamps 250 and 1550 have high variation in the mean value.

        # Plot correlations between mean, std of 'y' and size of portfolio.
        sns.set()
        columns = ['mean', 'std', 'len']
        sns.pairplot(market_return_df['y'][columns], size=2.5)

        # Price chart for the returns of the portfolio. The return corresponds to the mean of y over the
        # portfolio; it is plotted together with its cumulative sum.
        plt.figure()
        plt.plot(timestamp, y_mean, '.')
        plt.plot(timestamp, y_mean.cumsum())
        plt.legend(('portfolio value', 'cumulative portfolio value'), loc=1, borderaxespad=0.)
        plt.xlabel('timestamp')
        plt.ylabel('y mean')
        plt.title('Portfolio returns')
        plt.show()

    ''' Prepare data '''
    is_prepare_data = 1
    if is_prepare_data:
        df_merged_train_and_test = pd.DataFrame(data=np.concatenate((df_train[df_train.columns[
            df_train.columns != 'y']].values, df_test[df_test.columns[df_test.columns != 'y']].values)),
                                                columns=df_test.columns[df_test.columns != 'y'])

        df_merged_train_and_test.index = np.arange(0, df_merged_train_and_test.shape[0])

        if two_sigma_fin_mod_tools.is_portfolio_predictions:
            two_sigma_fin_mod_tools.correlation_coeffecients = df.corr().y
            df_merged_train_and_test = two_sigma_fin_mod_tools.prepare_data(df_merged_train_and_test)
            y_mean = TwoSigmaFinModTools.transform_data_to_portfolio(df[['timestamp', 'y']]).y.values
            y_mean_cum = y_mean.cumsum()
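            # The model is trained on the cumulative mean of y (step iii of the portfolio strategy above);
            # per-timestamp returns would be recovered afterwards by differencing the predictions.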
        else:
            df_merged_train_and_test = two_sigma_fin_mod_tools.prepare_data(df_merged_train_and_test)

        df_test_num_features = two_sigma_fin_mod_tools.extract_numerical_features(df_merged_train_and_test)

        is_drop_duplicates = 0
        if is_drop_duplicates:
            # Do not drop duplicates in test
            uniques_indices = df_merged_train_and_test[:df_train.shape[0]][df_test_num_features].drop_duplicates().index
            df_merged_train_and_test = pd.DataFrame(data=np.concatenate((df_merged_train_and_test.loc[
                                                                             uniques_indices].values,
                                                                         df_merged_train_and_test[
                                                                         df_train.shape[0]::].values)),
                                                    columns=df_merged_train_and_test.columns)
            target_value = df_train.y.values[uniques_indices]

            train_data = np.concatenate((df_merged_train_and_test[df_test_num_features].values[
                                         :uniques_indices.shape[0]], np.reshape(target_value,
                                                                                (target_value.shape[0], 1))), axis=1)
            test_data = df_merged_train_and_test[uniques_indices.shape[0]::][df_test_num_features].values
        else:
            if two_sigma_fin_mod_tools.is_portfolio_predictions:
                # Todo: implement a correct way to separate training and test data. The two sets should each
                # have their own preparation, since an average is taken over equal timestamps in the portfolio,
                # and the two individual averages cannot be recovered after the preparation.
                #
                # Hack:
                # Make a new train/test partition using the built-in method train_test_split() (default 25% test)
                x_train_split, x_test_split, y_train_split, y_test_split = train_test_split(
                    df_merged_train_and_test.values, y_mean_cum)
                train_data = np.concatenate((x_train_split, np.reshape(y_train_split, (y_train_split.shape[0], 1))),
                                            axis=1)
                test_data = np.concatenate((x_test_split, np.reshape(y_test_split, (y_test_split.shape[0], 1))),
                                           axis=1)
                id_df_test = np.arange(0, test_data.shape[0])
            else:
                train_data = np.concatenate(
                    (df_merged_train_and_test[df_test_num_features].values[:df_train.shape[0]],
                     np.reshape(df_train.y.values, (df_train.shape[0], 1))), axis=1)
                # test_data = df_merged_train_and_test[df_train.shape[0]::][df_test_num_features].values
                test_data = np.concatenate(
                    (df_merged_train_and_test[df_test_num_features].values[df_train.shape[0]:],
                     np.reshape(df_test.y.values, (df_test.shape[0], 1))), axis=1)

        # missing_values
        print('All df set missing values')
        two_sigma_fin_mod_tools.missing_values_in_dataframe(df)

    is_make_a_prediction = 1
    if is_make_a_prediction:
        ''' XGBoost and Regularized Linear Models and Random Forest '''
        print("\nPrediction Stats:")
        x_train = train_data[0::, :-1]
        y_train = train_data[0::, -1]
        y_test_data = test_data[0::, -1]
        test_data = test_data[0::, :-1]
        print('\nShapes train data')
        print(np.shape(x_train), np.shape(y_train))
        print('\nShapes test data')
        print(np.shape(test_data))

        is_lasso = 1
        if is_lasso:
            # x_train = np.asarray(x_train, dtype=long)
            # y_train = np.asarray(y_train, dtype=long)
            # test_data = np.asarray(test_data, dtype=long)

            # Regularized linear regression is needed to avoid overfitting, especially when there are many features
            lasso = LassoCV(alphas=[0.0001, 0.0003, 0.0006, 0.001, 0.003, 0.006, 0.01, 0.03, 0.06, 0.1,
                                    0.3, 0.6, 1],
                            max_iter=50000, cv=10)
            # Todo: make a copy of lasso object
            # lasso_copy = lasso

            # Exclude outliers
            # x_train, y_train = two_sigma_fin_mod_tools.outlier_identification(lasso, x_train, y_train)
            print('\nShape after outlier detection')
            print(np.shape(x_train), np.shape(y_train))

            # Feature selection with Lasso
            # Make comparison plot using only the train data.
            # Predicted vs. Actual Sale price
            title_name = 'LassoCV'
            two_sigma_fin_mod_tools.predicted_vs_actual_y_input_model(lasso, x_train, test_data, y_train, y_test_data,
                                                                      title_name)
            # plt.show()
            lasso.fit(x_train, y_train)
            alpha = lasso.alpha_
            print('best LassoCV alpha:', alpha)
            score = lasso.score(x_train, y_train)
            output_lasso = lasso.predict(X=test_data)
            print('\nSCORE Lasso linear model:---------------------------------------------------')
            print(score)

            is_feature_selection_prediction = 1
            if is_feature_selection_prediction:

                is_feature_selection_with_lasso = 1
                if is_feature_selection_with_lasso:
                    forest_feature_selection = lasso
                    add_name_of_regressor = 'Lasso'
                else:
                    add_name_of_regressor = 'Random Forest'
                    # Random forest (rf) regressor for feature selection
                    forest_feature_selection = RandomForestRegressor(n_estimators=240, max_depth=8)
                    forest_feature_selection = forest_feature_selection.fit(x_train, y_train)

                    # Evaluate variable importance with no cross validation
                    importances = forest_feature_selection.feature_importances_
                    # std = np.std([tree.feature_importances_ for tree in forest_feature_selection.estimators_], axis=0)
                    indices = np.argsort(importances)[::-1]

                    print('\nFeatures:')
                    df_test_num_features = two_sigma_fin_mod_tools.extract_numerical_features(df_test)
                    print(np.reshape(
                          np.append(np.array(list(df_test_num_features)), np.arange(0,
                                                                                    len(list(df_test_num_features)))),
                        (len(list(df_test_num_features)), 2), 'F'))  # , 2, len(list(df_test)))

                    print('\nFeature ranking:')
                    for f in range(x_train.shape[1]):
                        print('%d. feature %d (%f)' % (f + 1, indices[f], importances[indices[f]]))

                # Select most important features
                feature_selection_model = SelectFromModel(forest_feature_selection, prefit=True)
                # Todo: fix below method that returns empty array
                x_train_new = feature_selection_model.transform(x_train)
                print(x_train_new.shape)
                test_data_new = feature_selection_model.transform(test_data)
                print(test_data_new.shape)
                # We get that 21 features are selected

                title_name = ''.join([add_name_of_regressor, ' Feature Selection'])
                two_sigma_fin_mod_tools.predicted_vs_actual_y_input_model(forest_feature_selection, x_train, test_data,
                                                                          y_train, y_test_data, title_name)
                # plt.show()
                forest_feature_selected = forest_feature_selection.fit(x_train_new, y_train)
                score = forest_feature_selected.score(x_train_new, y_train)
                output_feature_selection_lasso = forest_feature_selection.predict(X=test_data_new)
                print('\nSCORE {0} regressor (feature select):---------------------------------------------------'
                      .format(add_name_of_regressor))
                print(score)

        ''' xgboost '''
        is_xgb_cv = 1
        if is_xgb_cv:
            seed = 0
            dtrain = xgb.DMatrix(x_train, label=y_train)
            dtest = xgb.DMatrix(test_data)

            xgb_params = {
                'seed': 0,
                'colsample_bytree': 0.8,
                'silent': 1,
                'subsample': 0.6,
                'learning_rate': 0.01,
                # 'booster': 'gblinear',  # default is gbtree
                'objective': 'reg:linear',
                'max_depth': 1,
                'num_parallel_tree': 1,
                'min_child_weight': 1,
                'eval_metric': 'rmse',
            }

            res = xgb.cv(xgb_params, dtrain, num_boost_round=10000, nfold=5, seed=seed, stratified=False,
                         early_stopping_rounds=100, verbose_eval=10, show_stdv=True)

            best_nrounds = res.shape[0] - 1
            cv_mean = res.iloc[-1, 0]
            cv_std = res.iloc[-1, 1]

            print('Ensemble-CV: {0}+{1}'.format(cv_mean, cv_std))
            title_name = 'xgb.cv'
            two_sigma_fin_mod_tools.predicted_vs_actual_y_xgb(xgb, best_nrounds, xgb_params, x_train, test_data,
                                                              y_train, y_test_data, title_name)
            # plt.show()
            gbdt = xgb.train(xgb_params, dtrain, best_nrounds)
            output_xgb_cv = gbdt.predict(dtest)

        # Averaging the output of two of the machine learning estimators (the alternatives are commented out below)
        output = (output_feature_selection_lasso + output_xgb_cv) / 2.0
        # output = (output_lasso + output_xgb_cv) / 2.0
        # output = output_lasso
        # output = output_feature_selection_lasso
        # output = output_xgb_cv

        save_path = '/home/mizio/Documents/Kaggle/TwoSigmaFinancialModelling/predicted_vs_actual/'
        if two_sigma_fin_mod_tools.is_portfolio_predictions:
            text_is_portfolio = ''.join(['Number_of_assets', str(x_train.shape[1]), '_'])
        else:
            text_is_portfolio = ''
        two_sigma_fin_mod_tools.multipage(''.join([save_path, 'Overview_estimators_rmse_', text_is_portfolio,
                                                   two_sigma_fin_mod_tools.timestamp, '.pdf']))
        plt.show()

    if is_make_a_prediction:
        ''' Submission '''
        save_path = '/home/mizio/Documents/Kaggle/TwoSigmaFinancialModelling/submission/'

        # expm1() is needed in order to get back the correct target value, since log1p() was applied earlier
        # if two_sigma_fin_mod_tools.is_with_log1p_SalePrice:
        #     output = np.expm1(output)

        submission = pd.DataFrame({'id': id_df_test, 'y': output})
        with pd.HDFStore(''.join([save_path, 'submission_two_sigma_fin_mod_tools_',
                                  two_sigma_fin_mod_tools.timestamp, '.h5']), 'w') as submit:
            submit.put('submission_two_sigma_fin_mod_tools', submission)
        print(two_sigma_fin_mod_tools.timestamp)


if __name__ == '__main__':
    main()
Using TensorFlow backend.
/home/mizio/anaconda2/envs/kaggle_env_pyth36/lib/python3.6/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.
  "This module will be removed in 0.20.", DeprecationWarning)
 TRAINING DATA:----------------------------------------------- 

1424
1813
  timestamp derived_0                  derived_1                  derived_2  \
                 mean       std    len      mean       std    len      mean   
0         0  0.039326  0.792450  750.0  0.111924  0.423766  750.0 -0.100055   
1         1  0.038036  0.797048  750.0  0.220014  2.570492  750.0  0.779802   
2         2  0.037680  0.799508  750.0  0.221317  2.571858  750.0  0.789735   
3         3  0.037372  0.801825  750.0  0.222442  2.573150  750.0  0.798304   
4         4  0.036876  0.805932  750.0  0.224254  2.575450  750.0  0.812107   

                    derived_3                  derived_4                   
         std    len      mean       std    len      mean       std    len  
0   3.133463  750.0 -0.093836  2.626800  750.0  0.080802  0.581652  750.0  
1  19.183546  750.0 -0.038055  2.331736  750.0  0.081712  0.582439  750.0  
2  19.158264  750.0 -0.025887  2.215398  750.0  0.082096  0.583017  750.0  
3  19.139776  750.0 -0.015389  2.131449  750.0  0.082428  0.583541  750.0  
4  19.116503  750.0  0.001520  2.033104  750.0  0.082963  0.584436  750.0  
         timestamp    derived_0                               derived_1  \
                           mean          std          len          mean   
count  1813.000000  1813.000000  1813.000000  1813.000000  1.813000e+03   
mean    906.000000    -4.982541   157.874374   943.605103  7.243518e+11   
std     523.512337     8.344781   212.110992    78.210724  2.274548e+12   
min       0.000000   -27.802135     0.781702   738.000000  1.119245e-01   
25%     453.000000   -10.381656    18.583494   908.000000  2.681750e-01   
50%     906.000000    -0.466401    31.042885   951.000000  3.407508e-01   
75%    1359.000000     0.053545   264.477325   973.000000  5.380114e+05   
max    1812.000000     2.787160   734.341248  1086.000000  1.150105e+13   

                                    derived_2                            \
                std          len         mean          std          len   
count  1.813000e+03  1813.000000  1813.000000  1813.000000  1813.000000   
mean   2.242711e+13   943.605103    -0.356182    24.056559   943.605103   
std    7.033462e+13    78.210724     2.481158    62.028286    78.210724   
min    4.237660e-01   738.000000   -13.849416     1.571783   738.000000   
25%    1.754980e+00   908.000000    -0.087640     2.796356   908.000000   
50%    2.471721e+00   951.000000     0.026206     4.678284   951.000000   
75%    1.647755e+07   973.000000     0.193288    13.692002   973.000000   
max    3.505464e+14  1086.000000     5.146124   370.850891  1086.000000   

         derived_3                              derived_4               \
              mean          std          len         mean          std   
count  1813.000000  1813.000000  1813.000000  1813.000000  1813.000000   
mean     -0.568780    23.792509   943.605103    20.064415   533.228821   
std       4.013206   108.070206    78.210724    33.664864   830.802795   
min     -46.759716     0.907156   738.000000    -3.400645     0.581652   
25%      -0.161980     2.469956   908.000000     0.054219     1.126296   
50%       0.010760     4.672005   951.000000     0.162291    31.561380   
75%       0.179317    12.980519   973.000000    37.072002   988.941284   
max       1.227720  1272.839966  1086.000000   113.223793  2770.350098   

                    
               len  
count  1813.000000  
mean    943.605103  
std      78.210724  
min     738.000000  
25%     908.000000  
50%     951.000000  
75%     973.000000  
max    1086.000000  
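
The two bare counts printed above are presumably the number of unique asset ids (1424) and of timestamps (1813) in the training frame, and the two tables look like the head() and describe() of a per-timestamp aggregation of the derived features. A minimal sketch of how such a summary could be produced (the column selection and aggregation functions are assumptions, not the notebook's exact call):

# Sketch: per-timestamp mean/std/len of the derived features, matching the
# multi-index columns shown above.
import numpy as np

df = TwoSigmaFinModTools.df
derived_cols = ['derived_0', 'derived_1', 'derived_2', 'derived_3', 'derived_4']
agg_derived = (df.groupby('timestamp')[derived_cols]
                 .agg([np.mean, np.std, len])
                 .reset_index())
print(agg_derived.head())
print(agg_derived.describe())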

 Assets:----------------------------------------------- 

   id      mean       std     len
0   0  0.000207  0.014373  1646.0
1   6  0.000154  0.014151   728.0
2   7  0.000524  0.021986  1543.0
3  10 -0.000721  0.027715   116.0
4  11  0.000158  0.012680  1813.0
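
The asset table above reads like per-id statistics of the target y. A sketch along these lines would reproduce it (the aggregation choices are assumptions):

# Sketch: per-asset mean/std/len of the target y.
import numpy as np

df = TwoSigmaFinModTools.df
asset_stats = df.groupby('id')['y'].agg([np.mean, np.std, len]).reset_index()
print(asset_stats.head())
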
       id timestamp                     y                  
               amin  amax   len      amin      amax     len
0       0       167  1812  1646 -0.086094  0.093498  1646.0
891  1334         0  1812  1813 -0.086094  0.093498  1813.0
902  1352         0  1812  1813 -0.086094  0.093498  1813.0
901  1350         0  1812  1813 -0.086094  0.093498  1813.0
900  1349      1290  1812   523 -0.086094  0.093498   523.0
Number of intermediate sold assets: 10
     index    id timestamp                     y                  
                      amin  amax   len      amin      amax     len
248    783  1178         0  1812  1451 -0.086094  0.093498  1451.0
575   1209  1831         0  1812  1812 -0.086094  0.093498  1812.0
1076    51    77       473  1812   986 -0.086094  0.093498   986.0
1117  1144  1732         0  1595  1348 -0.086094  0.093498  1348.0
1151  1289  1950         0  1449   630 -0.086094  0.093498   630.0
1187   172   264         0  1340  1340 -0.086094  0.093498  1340.0
1246   192   289         0  1087   944 -0.086094  0.093498   944.0
1330    87   129         0   714   683 -0.086094  0.093498   683.0
1331  1027  1538        68   714   637 -0.086094  0.093498   637.0
1376   204   306         0   211   178 -0.086094  0.048407   178.0


timestamp length and len diffs: 

      index    0
248     783  362
575    1209    1
1076     51  354
1117   1144  248
1151   1289  820
1187    172    1
1246    192  144
1330     87   32
1331   1027   10
1376    204   34
     id   amin    amax
0  1178  365.0   737.0
1  1831  907.0  1812.0
2    77  721.0  1320.0
3  1732  675.0  1595.0
4  1950   41.0   900.0
5   264   85.0   169.0
6   289  473.0  1087.0
7   129  343.0   714.0
8  1538  387.0   714.0
9   306   90.0   211.0
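
The block above identifies assets that drop out of the portfolio and re-enter later: for each id, the observed number of timestamps (len) is compared with the span amax - amin + 1, and the ten ids with a positive gap are listed together with the missing intervals. A hedged sketch of that check (variable names are illustrative, not the notebook's own):

# Sketch: flag "intermediate sold" assets, i.e. ids whose observed timestamp
# count is smaller than the span they cover.
import numpy as np

df = TwoSigmaFinModTools.df
span = df.groupby('id')['timestamp'].agg([np.min, np.max, len]).reset_index()
# np.min/np.max surface as the 'amin'/'amax' column labels seen above
gap = (span['amax'] - span['amin'] + 1) - span['len']
intermediate_sold = span[gap > 0]
print('Number of intermediate sold assets:', len(intermediate_sold))
print(gap[gap > 0])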
[MICE] Completing matrix with shape (1813, 110)
[MICE] Starting imputation round 1/110, elapsed time 0.002
[MICE] Starting imputation round 2/110, elapsed time 0.102
[MICE] Starting imputation round 3/110, elapsed time 0.153
[MICE] Starting imputation round 4/110, elapsed time 0.206
[MICE] Starting imputation round 5/110, elapsed time 0.259
[MICE] Starting imputation round 6/110, elapsed time 0.311
[MICE] Starting imputation round 7/110, elapsed time 0.363
[MICE] Starting imputation round 8/110, elapsed time 0.422
[MICE] Starting imputation round 9/110, elapsed time 0.477
[MICE] Starting imputation round 10/110, elapsed time 0.531
[MICE] Starting imputation round 11/110, elapsed time 0.586
[MICE] Starting imputation round 12/110, elapsed time 0.643
[MICE] Starting imputation round 13/110, elapsed time 0.699
[MICE] Starting imputation round 14/110, elapsed time 0.753
[MICE] Starting imputation round 15/110, elapsed time 0.807
[MICE] Starting imputation round 16/110, elapsed time 0.862
[MICE] Starting imputation round 17/110, elapsed time 0.916
[MICE] Starting imputation round 18/110, elapsed time 0.969
[MICE] Starting imputation round 19/110, elapsed time 1.022
[MICE] Starting imputation round 20/110, elapsed time 1.077
[MICE] Starting imputation round 21/110, elapsed time 1.132
[MICE] Starting imputation round 22/110, elapsed time 1.185
[MICE] Starting imputation round 23/110, elapsed time 1.238
[MICE] Starting imputation round 24/110, elapsed time 1.293
[MICE] Starting imputation round 25/110, elapsed time 1.348
[MICE] Starting imputation round 26/110, elapsed time 1.401
[MICE] Starting imputation round 27/110, elapsed time 1.454
[MICE] Starting imputation round 28/110, elapsed time 1.510
[MICE] Starting imputation round 29/110, elapsed time 1.565
[MICE] Starting imputation round 30/110, elapsed time 1.622
[MICE] Starting imputation round 31/110, elapsed time 1.678
[MICE] Starting imputation round 32/110, elapsed time 1.739
[MICE] Starting imputation round 33/110, elapsed time 1.798
[MICE] Starting imputation round 34/110, elapsed time 1.857
[MICE] Starting imputation round 35/110, elapsed time 1.915
[MICE] Starting imputation round 36/110, elapsed time 1.975
[MICE] Starting imputation round 37/110, elapsed time 2.034
[MICE] Starting imputation round 38/110, elapsed time 2.091
[MICE] Starting imputation round 39/110, elapsed time 2.148
[MICE] Starting imputation round 40/110, elapsed time 2.212
[MICE] Starting imputation round 41/110, elapsed time 2.268
[MICE] Starting imputation round 42/110, elapsed time 2.322
[MICE] Starting imputation round 43/110, elapsed time 2.378
[MICE] Starting imputation round 44/110, elapsed time 2.434
[MICE] Starting imputation round 45/110, elapsed time 2.488
[MICE] Starting imputation round 46/110, elapsed time 2.543
[MICE] Starting imputation round 47/110, elapsed time 2.596
[MICE] Starting imputation round 48/110, elapsed time 2.650
[MICE] Starting imputation round 49/110, elapsed time 2.707
[MICE] Starting imputation round 50/110, elapsed time 2.760
[MICE] Starting imputation round 51/110, elapsed time 2.814
[MICE] Starting imputation round 52/110, elapsed time 2.869
[MICE] Starting imputation round 53/110, elapsed time 2.926
[MICE] Starting imputation round 54/110, elapsed time 2.979
[MICE] Starting imputation round 55/110, elapsed time 3.032
[MICE] Starting imputation round 56/110, elapsed time 3.087
[MICE] Starting imputation round 57/110, elapsed time 3.142
[MICE] Starting imputation round 58/110, elapsed time 3.195
[MICE] Starting imputation round 59/110, elapsed time 3.249
[MICE] Starting imputation round 60/110, elapsed time 3.304
[MICE] Starting imputation round 61/110, elapsed time 3.360
[MICE] Starting imputation round 62/110, elapsed time 3.414
[MICE] Starting imputation round 63/110, elapsed time 3.468
[MICE] Starting imputation round 64/110, elapsed time 3.524
[MICE] Starting imputation round 65/110, elapsed time 3.580
[MICE] Starting imputation round 66/110, elapsed time 3.633
[MICE] Starting imputation round 67/110, elapsed time 3.686
[MICE] Starting imputation round 68/110, elapsed time 3.740
[MICE] Starting imputation round 69/110, elapsed time 3.796
[MICE] Starting imputation round 70/110, elapsed time 3.851
[MICE] Starting imputation round 71/110, elapsed time 3.906
[MICE] Starting imputation round 72/110, elapsed time 3.961
[MICE] Starting imputation round 73/110, elapsed time 4.016
[MICE] Starting imputation round 74/110, elapsed time 4.070
[MICE] Starting imputation round 75/110, elapsed time 4.125
[MICE] Starting imputation round 76/110, elapsed time 4.180
[MICE] Starting imputation round 77/110, elapsed time 4.235
[MICE] Starting imputation round 78/110, elapsed time 4.289
[MICE] Starting imputation round 79/110, elapsed time 4.342
[MICE] Starting imputation round 80/110, elapsed time 4.398
[MICE] Starting imputation round 81/110, elapsed time 4.452
[MICE] Starting imputation round 82/110, elapsed time 4.505
[MICE] Starting imputation round 83/110, elapsed time 4.560
[MICE] Starting imputation round 84/110, elapsed time 4.617
[MICE] Starting imputation round 85/110, elapsed time 4.672
[MICE] Starting imputation round 86/110, elapsed time 4.726
[MICE] Starting imputation round 87/110, elapsed time 4.780
[MICE] Starting imputation round 88/110, elapsed time 4.839
[MICE] Starting imputation round 89/110, elapsed time 4.894
[MICE] Starting imputation round 90/110, elapsed time 4.948
[MICE] Starting imputation round 91/110, elapsed time 5.002
[MICE] Starting imputation round 92/110, elapsed time 5.058
[MICE] Starting imputation round 93/110, elapsed time 5.113
[MICE] Starting imputation round 94/110, elapsed time 5.167
[MICE] Starting imputation round 95/110, elapsed time 5.221
[MICE] Starting imputation round 96/110, elapsed time 5.279
[MICE] Starting imputation round 97/110, elapsed time 5.335
[MICE] Starting imputation round 98/110, elapsed time 5.390
[MICE] Starting imputation round 99/110, elapsed time 5.443
[MICE] Starting imputation round 100/110, elapsed time 5.499
[MICE] Starting imputation round 101/110, elapsed time 5.555
[MICE] Starting imputation round 102/110, elapsed time 5.608
[MICE] Starting imputation round 103/110, elapsed time 5.662
[MICE] Starting imputation round 104/110, elapsed time 5.719
[MICE] Starting imputation round 105/110, elapsed time 5.774
[MICE] Starting imputation round 106/110, elapsed time 5.829
[MICE] Starting imputation round 107/110, elapsed time 5.884
[MICE] Starting imputation round 108/110, elapsed time 5.939
[MICE] Starting imputation round 109/110, elapsed time 5.994
[MICE] Starting imputation round 110/110, elapsed time 6.048
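
The [MICE] log above comes from multiple imputation of the missing numerical values. A minimal sketch, assuming the older fancyimpute API imported at the top of the notebook (a MICE object exposing .complete()); the 110 rounds in the log match the defaults n_burn_in=10 plus n_imputations=100:

# Sketch: MICE imputation of the numerical columns of a frame.
import numpy as np
from fancyimpute import MICE

def impute_missing(frame, numerical_cols):
    values = frame[numerical_cols].values.astype(np.float64)
    completed = MICE(n_imputations=100, n_burn_in=10, verbose=True).complete(values)
    frame = frame.copy()
    frame[numerical_cols] = completed
    return frame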
All df set missing values
derived_0          72959
derived_1          81029
derived_2         398651
derived_3         149471
derived_4         406458
fundamental_0      23947
fundamental_1     679070
fundamental_2     368840
fundamental_3     454380
fundamental_5     962020
fundamental_6     701625
fundamental_7      26340
fundamental_8     373166
fundamental_9     565567
fundamental_10    112977
fundamental_11    368840
fundamental_12    110871
fundamental_13    355138
fundamental_14    356084
fundamental_15    354897
fundamental_16    355138
fundamental_17     97222
fundamental_18     15833
fundamental_19     54588
fundamental_20    110871
fundamental_21     54333
fundamental_22    558488
fundamental_23    356723
fundamental_24    576655
fundamental_25    121894
                   ...  
technical_10      167483
technical_11        4279
technical_12       19165
technical_13        4764
technical_14       14184
technical_16       19981
technical_17        4279
technical_18       20016
technical_19        2320
technical_20        4764
technical_21        2236
technical_24       71146
technical_25      208056
technical_27        2420
technical_28      262916
technical_29       61615
technical_30        4764
technical_31      182678
technical_32       19165
technical_33       14535
technical_35        3155
technical_36        2552
technical_37       19165
technical_38       19165
technical_39       20016
technical_40        2236
technical_41       44189
technical_42       20001
technical_43        4686
technical_44      236779
dtype: int64
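
The listing above is a per-column count of missing values over the training frame; something like the following would print it (restricting the output to columns that actually have missing entries is an assumption):

# Sketch: per-column missing-value counts.
df = TwoSigmaFinModTools.df
missing = df.isnull().sum()
print(missing[missing > 0])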



Prediction Stats:

Shapes train data
(1359, 110) (1359,)

Shapes test data
(454, 110)

Shape after outlier detection
(1359, 110) (1359,)
(1359, 110) (454, 110) (1359,) (454,)
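
The shapes above suggest the 1813 imputed rows were split roughly 75/25 into train and test, followed by an outlier-detection pass on the training part; IsolationForest is imported in the notebook, so it is presumably what flags outliers here. The printed shape is unchanged afterwards, so either nothing was flagged or the mask was not applied. A sketch under those assumptions (features and target stand in for the imputed 1813 x 110 feature matrix and its target vector, and are not the notebook's own names):

# Sketch: train/test split and IsolationForest outlier detection.
from sklearn.ensemble import IsolationForest
from sklearn.model_selection import train_test_split

x_train, x_test, y_train, y_test = train_test_split(
    features, target, test_size=0.25, random_state=1)
print(x_train.shape, y_train.shape)
print(x_test.shape)

forest = IsolationForest(random_state=1)
forest.fit(x_train)
inliers = forest.predict(x_train) == 1  # +1 for inliers, -1 for outliers
x_train, y_train = x_train[inliers], y_train[inliers]
print(x_train.shape, y_train.shape)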
best LassoCV alpha: 0.0001

SCORE Lasso linear model:---------------------------------------------------
0.995661451832
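
The "best LassoCV alpha" and the score above point to a cross-validated Lasso fit. A sketch, reusing the split from the previous snippet (the alpha grid and the use of .score() on the held-out part are assumptions):

# Sketch: LassoCV fit and held-out score.
from sklearn.linear_model import LassoCV

lasso = LassoCV(alphas=[0.0001, 0.001, 0.01, 0.1, 1.0])
lasso.fit(x_train, y_train)
print('best LassoCV alpha:', lasso.alpha_)
print(lasso.score(x_test, y_test))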
(1359, 62)
(454, 62)
(1359, 110) (454, 110) (1359,) (454,)

SCORE Lasso regressor (feature select):---------------------------------------------------
0.995664361126
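
The (1359, 62) and (454, 62) shapes above suggest a feature-selection step that keeps only the columns with non-zero Lasso coefficients, and the long "[i] train-rmse:...+... test-rmse:...+..." log that follows looks like xgboost's built-in cross validation with progress printed every 10 rounds. A sketch continuing the snippets above (all parameter values are assumptions chosen for illustration):

# Sketch: Lasso-based feature selection followed by xgb.cv.
import numpy as np
import xgboost as xgb

selected = np.nonzero(lasso.coef_)[0]  # indices of surviving features
x_train_sel, x_test_sel = x_train[:, selected], x_test[:, selected]
print(x_train_sel.shape)
print(x_test_sel.shape)

dtrain = xgb.DMatrix(x_train_sel, label=y_train)
params = {'objective': 'reg:linear', 'eta': 0.01, 'max_depth': 3, 'silent': 1}
cv_result = xgb.cv(params, dtrain, num_boost_round=10000, nfold=5,
                   early_stopping_rounds=100, verbose_eval=10, show_stdv=True)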
[0]	train-rmse:0.320005+0.00203768	test-rmse:0.319902+0.00836074
[10]	train-rmse:0.290374+0.00180384	test-rmse:0.290285+0.00795851
[20]	train-rmse:0.263595+0.0016211	test-rmse:0.263514+0.00752858
[30]	train-rmse:0.239478+0.00146523	test-rmse:0.239421+0.00715237
[40]	train-rmse:0.217713+0.00131864	test-rmse:0.217637+0.00682653
[50]	train-rmse:0.198074+0.00118492	test-rmse:0.198023+0.00651585
[60]	train-rmse:0.180378+0.00106037	test-rmse:0.180336+0.0062295
[70]	train-rmse:0.164382+0.000942736	test-rmse:0.164352+0.00599983
[80]	train-rmse:0.149955+0.000869757	test-rmse:0.149933+0.00580744
[90]	train-rmse:0.136897+0.000785809	test-rmse:0.136896+0.00558288
[100]	train-rmse:0.125102+0.000705913	test-rmse:0.125103+0.00534624
[110]	train-rmse:0.114442+0.000621751	test-rmse:0.114434+0.00523252
[120]	train-rmse:0.104825+0.000570106	test-rmse:0.104833+0.00504682
[130]	train-rmse:0.0961566+0.000525616	test-rmse:0.0961518+0.00491014
[140]	train-rmse:0.0883494+0.000468248	test-rmse:0.0883652+0.00474781
[150]	train-rmse:0.0813132+0.00045354	test-rmse:0.0813404+0.00454289
[160]	train-rmse:0.0749626+0.000412476	test-rmse:0.0749882+0.00437123
[170]	train-rmse:0.0692454+0.000371199	test-rmse:0.0692782+0.00419658
[180]	train-rmse:0.0640708+0.000327097	test-rmse:0.0641214+0.00402221
[190]	train-rmse:0.0594+0.000288276	test-rmse:0.0594502+0.00388757
[200]	train-rmse:0.055179+0.000247053	test-rmse:0.0552702+0.00374076
[210]	train-rmse:0.0513856+0.000218375	test-rmse:0.0514514+0.00361558
[220]	train-rmse:0.0479614+0.000197057	test-rmse:0.0480364+0.00344986
[230]	train-rmse:0.0448782+0.000169568	test-rmse:0.0449486+0.00328476
[240]	train-rmse:0.0420852+0.000146798	test-rmse:0.0421626+0.00314102
[250]	train-rmse:0.0395594+0.000127876	test-rmse:0.0396556+0.00302276
[260]	train-rmse:0.0372784+0.000127362	test-rmse:0.0373654+0.00288003
[270]	train-rmse:0.0352076+0.000122112	test-rmse:0.0353028+0.00271341
[280]	train-rmse:0.0333374+0.000110039	test-rmse:0.0334286+0.0026027
[290]	train-rmse:0.0316526+0.000103625	test-rmse:0.03173+0.00245612
[300]	train-rmse:0.0301362+0.000108671	test-rmse:0.0302174+0.00238106
[310]	train-rmse:0.0287574+0.000107504	test-rmse:0.028849+0.00224706
[320]	train-rmse:0.0275056+9.75082e-05	test-rmse:0.0276244+0.00215289
[330]	train-rmse:0.0263692+9.13551e-05	test-rmse:0.026513+0.0020525
[340]	train-rmse:0.0253356+9.79339e-05	test-rmse:0.0254842+0.00192282
[350]	train-rmse:0.0244028+9.11535e-05	test-rmse:0.0245416+0.00182305
[360]	train-rmse:0.0235474+9.63153e-05	test-rmse:0.023713+0.00170153
[370]	train-rmse:0.0227606+9.44386e-05	test-rmse:0.0229328+0.00164122
[380]	train-rmse:0.0220432+9.19661e-05	test-rmse:0.022218+0.00158033
[390]	train-rmse:0.021386+9.56347e-05	test-rmse:0.0215824+0.00150222
[400]	train-rmse:0.0207754+9.31356e-05	test-rmse:0.0209832+0.00143195
[410]	train-rmse:0.0202096+9.15852e-05	test-rmse:0.0204202+0.00137715
[420]	train-rmse:0.0196864+8.91843e-05	test-rmse:0.0199064+0.00132684
[430]	train-rmse:0.0192014+8.51483e-05	test-rmse:0.0194306+0.00128352
[440]	train-rmse:0.0187458+8.26206e-05	test-rmse:0.018996+0.00123722
[450]	train-rmse:0.0183144+8.6456e-05	test-rmse:0.0185712+0.00118651
[460]	train-rmse:0.017906+8.4233e-05	test-rmse:0.018174+0.00114409
[470]	train-rmse:0.0175294+8.65762e-05	test-rmse:0.0178166+0.00111494
[480]	train-rmse:0.0171686+8.46938e-05	test-rmse:0.0174538+0.0010896
[490]	train-rmse:0.0168336+8.45686e-05	test-rmse:0.0171242+0.00106307
[500]	train-rmse:0.0165126+8.23738e-05	test-rmse:0.016817+0.00103724
[510]	train-rmse:0.0162062+8.02057e-05	test-rmse:0.0165176+0.00099988
[520]	train-rmse:0.0159154+8.34592e-05	test-rmse:0.01623+0.000970932
[530]	train-rmse:0.0156362+8.44071e-05	test-rmse:0.015955+0.000946961
[540]	train-rmse:0.0153724+8.84366e-05	test-rmse:0.0157026+0.000922747
[550]	train-rmse:0.0151216+8.89215e-05	test-rmse:0.015459+0.000898212
[560]	train-rmse:0.0148842+8.91345e-05	test-rmse:0.0152304+0.000879221
[570]	train-rmse:0.0146508+8.94034e-05	test-rmse:0.0150054+0.000865116
[580]	train-rmse:0.014434+9.11526e-05	test-rmse:0.0147926+0.000850712
[590]	train-rmse:0.0142226+9.25691e-05	test-rmse:0.0145824+0.000830761
[600]	train-rmse:0.0140188+9.256e-05	test-rmse:0.0143928+0.000816334
[610]	train-rmse:0.0138246+9.16899e-05	test-rmse:0.0142096+0.000804969
[620]	train-rmse:0.0136398+9.01497e-05	test-rmse:0.0140298+0.000792493
[630]	train-rmse:0.013462+9.15096e-05	test-rmse:0.0138612+0.000786085
[640]	train-rmse:0.01329+8.99133e-05	test-rmse:0.0136992+0.000770591
[650]	train-rmse:0.0131256+8.95156e-05	test-rmse:0.0135378+0.000752422
[660]	train-rmse:0.0129672+9.01829e-05	test-rmse:0.0133902+0.000742769
[670]	train-rmse:0.0128154+9.04756e-05	test-rmse:0.0132462+0.000731935
[680]	train-rmse:0.0126682+9.07709e-05	test-rmse:0.0131074+0.000720063
[690]	train-rmse:0.0125284+9.12855e-05	test-rmse:0.0129748+0.000699998
[700]	train-rmse:0.0123892+9.2103e-05	test-rmse:0.0128428+0.00068777
[710]	train-rmse:0.0122588+9.10174e-05	test-rmse:0.0127232+0.000679158
[720]	train-rmse:0.0121308+8.99275e-05	test-rmse:0.012606+0.000662318
[730]	train-rmse:0.0120082+8.67788e-05	test-rmse:0.0124882+0.00065687
[740]	train-rmse:0.0118886+8.65069e-05	test-rmse:0.0123744+0.000652856
[750]	train-rmse:0.0117742+8.7392e-05	test-rmse:0.0122656+0.000645415
[760]	train-rmse:0.0116632+8.64856e-05	test-rmse:0.0121638+0.000640674
[770]	train-rmse:0.0115556+8.58617e-05	test-rmse:0.0120678+0.000634499
[780]	train-rmse:0.0114494+8.48495e-05	test-rmse:0.0119652+0.000632477
[790]	train-rmse:0.0113494+8.2993e-05	test-rmse:0.0118658+0.000624869
[800]	train-rmse:0.0112508+8.02905e-05	test-rmse:0.0117716+0.000623563
[810]	train-rmse:0.011153+7.98724e-05	test-rmse:0.0116786+0.000612794
[820]	train-rmse:0.0110588+7.98433e-05	test-rmse:0.0115924+0.00061156
[830]	train-rmse:0.0109688+7.95698e-05	test-rmse:0.0115056+0.000605423
[840]	train-rmse:0.0108786+7.74664e-05	test-rmse:0.0114224+0.000598441
[850]	train-rmse:0.010793+7.88999e-05	test-rmse:0.0113424+0.00059522
[860]	train-rmse:0.010708+7.84857e-05	test-rmse:0.0112644+0.000586348
[870]	train-rmse:0.0106258+7.72254e-05	test-rmse:0.0111848+0.000578991
[880]	train-rmse:0.0105448+7.5335e-05	test-rmse:0.0111128+0.00057209
[890]	train-rmse:0.010465+7.38783e-05	test-rmse:0.0110388+0.000570878
[900]	train-rmse:0.010386+7.29575e-05	test-rmse:0.0109726+0.000560567
[910]	train-rmse:0.010309+7.25176e-05	test-rmse:0.010899+0.000560234
[920]	train-rmse:0.0102344+7.12084e-05	test-rmse:0.010826+0.000554643
[930]	train-rmse:0.010162+7.20305e-05	test-rmse:0.0107614+0.000548359
[940]	train-rmse:0.0100912+7.12359e-05	test-rmse:0.010696+0.000542852
[950]	train-rmse:0.0100198+7.05702e-05	test-rmse:0.0106288+0.000538177
[960]	train-rmse:0.0099506+7.01501e-05	test-rmse:0.0105672+0.000530982
[970]	train-rmse:0.0098828+6.99497e-05	test-rmse:0.0105064+0.000526973
[980]	train-rmse:0.0098154+6.89249e-05	test-rmse:0.0104412+0.000522792
[990]	train-rmse:0.0097516+6.73902e-05	test-rmse:0.0103846+0.000526519
[1000]	train-rmse:0.0096884+6.76331e-05	test-rmse:0.010325+0.00052032
[1010]	train-rmse:0.0096252+6.69041e-05	test-rmse:0.0102658+0.000516856
[1020]	train-rmse:0.0095628+6.54504e-05	test-rmse:0.0101994+0.000517517
[1030]	train-rmse:0.0095028+6.61374e-05	test-rmse:0.0101436+0.000518476
[1040]	train-rmse:0.0094432+6.58829e-05	test-rmse:0.010089+0.000517141
[1050]	train-rmse:0.0093864+6.66231e-05	test-rmse:0.0100332+0.000514418
[1060]	train-rmse:0.0093302+6.49997e-05	test-rmse:0.0099818+0.000511663
[1070]	train-rmse:0.0092742+6.65023e-05	test-rmse:0.0099354+0.000510022
[1080]	train-rmse:0.0092194+6.66651e-05	test-rmse:0.0098852+0.000505994
[1090]	train-rmse:0.0091658+6.67964e-05	test-rmse:0.009836+0.000498437
[1100]	train-rmse:0.0091138+6.5603e-05	test-rmse:0.0097876+0.000496836
[1110]	train-rmse:0.0090604+6.57772e-05	test-rmse:0.0097368+0.000493753
[1120]	train-rmse:0.009011+6.54767e-05	test-rmse:0.0096924+0.00049171
[1130]	train-rmse:0.008961+6.54492e-05	test-rmse:0.0096482+0.000488107
[1140]	train-rmse:0.0089116+6.4214e-05	test-rmse:0.0096102+0.000489056
[1150]	train-rmse:0.0088626+6.3544e-05	test-rmse:0.009561+0.000489154
[1160]	train-rmse:0.008815+6.31443e-05	test-rmse:0.009518+0.00048558
[1170]	train-rmse:0.0087688+6.31772e-05	test-rmse:0.0094788+0.000481796
[1180]	train-rmse:0.008723+6.35327e-05	test-rmse:0.0094396+0.000476706
[1190]	train-rmse:0.0086784+6.35157e-05	test-rmse:0.0093966+0.000473657
[1200]	train-rmse:0.008634+6.28649e-05	test-rmse:0.0093556+0.00047168
[1210]	train-rmse:0.0085902+6.20271e-05	test-rmse:0.0093176+0.000469764
[1220]	train-rmse:0.0085466+6.1249e-05	test-rmse:0.009282+0.000469275
[1230]	train-rmse:0.0085048+6.08125e-05	test-rmse:0.0092458+0.000467962
[1240]	train-rmse:0.008463+5.98832e-05	test-rmse:0.0092136+0.000464181
[1250]	train-rmse:0.0084226+5.92236e-05	test-rmse:0.0091724+0.000464328
[1260]	train-rmse:0.008383+5.96356e-05	test-rmse:0.0091396+0.000459184
[1270]	train-rmse:0.0083436+6.0195e-05	test-rmse:0.0091038+0.000455388
[1280]	train-rmse:0.0083046+5.90173e-05	test-rmse:0.0090688+0.00045685
[1290]	train-rmse:0.0082678+5.85642e-05	test-rmse:0.009036+0.000452662
[1300]	train-rmse:0.0082296+5.91087e-05	test-rmse:0.0089996+0.000451683
[1310]	train-rmse:0.0081936+5.89461e-05	test-rmse:0.0089668+0.000450751
[1320]	train-rmse:0.0081562+5.85437e-05	test-rmse:0.0089316+0.000450719
[1330]	train-rmse:0.008121+5.90695e-05	test-rmse:0.0088986+0.000450306
[1340]	train-rmse:0.0080862+5.88231e-05	test-rmse:0.0088664+0.000448767
[1350]	train-rmse:0.0080516+5.83047e-05	test-rmse:0.008834+0.000446829
[1360]	train-rmse:0.0080178+5.84787e-05	test-rmse:0.0088016+0.00044301
[1370]	train-rmse:0.0079844+5.78502e-05	test-rmse:0.0087718+0.000442838
[1380]	train-rmse:0.0079508+5.79289e-05	test-rmse:0.0087434+0.000438893
[1390]	train-rmse:0.007919+5.76784e-05	test-rmse:0.0087186+0.000438234
[1400]	train-rmse:0.0078866+5.72629e-05	test-rmse:0.008691+0.000435067
[1410]	train-rmse:0.0078538+5.75757e-05	test-rmse:0.008661+0.000433764
[1420]	train-rmse:0.0078228+5.69224e-05	test-rmse:0.0086304+0.000432967
[1430]	train-rmse:0.0077918+5.72133e-05	test-rmse:0.0086018+0.000434211
[1440]	train-rmse:0.0077606+5.69442e-05	test-rmse:0.008576+0.000431122
[1450]	train-rmse:0.0077308+5.61192e-05	test-rmse:0.0085464+0.000432011
[1460]	train-rmse:0.0077012+5.70873e-05	test-rmse:0.0085206+0.000430697
[1470]	train-rmse:0.007672+5.73899e-05	test-rmse:0.0084936+0.00042813
[1480]	train-rmse:0.0076422+5.73739e-05	test-rmse:0.0084666+0.000429504
[1490]	train-rmse:0.0076142+5.65876e-05	test-rmse:0.0084432+0.00042616
[1500]	train-rmse:0.007587+5.67768e-05	test-rmse:0.0084198+0.000422443
[1510]	train-rmse:0.0075586+5.67542e-05	test-rmse:0.0083944+0.000420074
[1520]	train-rmse:0.0075306+5.76632e-05	test-rmse:0.0083692+0.00041916
[1530]	train-rmse:0.0075036+5.78467e-05	test-rmse:0.0083452+0.00041559
[1540]	train-rmse:0.0074778+5.78495e-05	test-rmse:0.0083222+0.000414559
[1550]	train-rmse:0.007451+5.78654e-05	test-rmse:0.0083+0.000415333
[1560]	train-rmse:0.0074252+5.77803e-05	test-rmse:0.0082772+0.000413188
[1570]	train-rmse:0.0073988+5.78045e-05	test-rmse:0.008255+0.000412862
[1580]	train-rmse:0.0073742+5.68063e-05	test-rmse:0.008236+0.000411017
[1590]	train-rmse:0.0073488+5.67324e-05	test-rmse:0.0082124+0.000410797
[1600]	train-rmse:0.0073244+5.66201e-05	test-rmse:0.008188+0.000405055
[1610]	train-rmse:0.0072994+5.59164e-05	test-rmse:0.0081656+0.000404702
[1620]	train-rmse:0.0072756+5.51855e-05	test-rmse:0.0081478+0.000404399
[1630]	train-rmse:0.0072508+5.51485e-05	test-rmse:0.0081256+0.000402209
[1640]	train-rmse:0.007228+5.45601e-05	test-rmse:0.0081052+0.000402806
[1650]	train-rmse:0.007205+5.48963e-05	test-rmse:0.0080858+0.000404415
[1660]	train-rmse:0.007182+5.50018e-05	test-rmse:0.0080716+0.000401108
[1670]	train-rmse:0.0071592+5.50542e-05	test-rmse:0.00805+0.000398056
[1680]	train-rmse:0.0071364+5.55863e-05	test-rmse:0.0080244+0.000398403
[1690]	train-rmse:0.0071138+5.54667e-05	test-rmse:0.0080052+0.000398929
[1700]	train-rmse:0.0070916+5.51093e-05	test-rmse:0.0079832+0.000397926
[1710]	train-rmse:0.0070702+5.56899e-05	test-rmse:0.0079672+0.000395688
[1720]	train-rmse:0.007048+5.49873e-05	test-rmse:0.0079458+0.000395842
[1730]	train-rmse:0.0070268+5.50832e-05	test-rmse:0.007927+0.000389294
[1740]	train-rmse:0.0070062+5.50215e-05	test-rmse:0.0079056+0.000386271
[1750]	train-rmse:0.0069854+5.49276e-05	test-rmse:0.0078854+0.000386875
[1760]	train-rmse:0.006965+5.48124e-05	test-rmse:0.0078648+0.000380471
[1770]	train-rmse:0.0069452+5.48503e-05	test-rmse:0.007849+0.000380366
[1780]	train-rmse:0.006925+5.47978e-05	test-rmse:0.007827+0.000381265
[1790]	train-rmse:0.006905+5.49509e-05	test-rmse:0.0078082+0.000380586
[1800]	train-rmse:0.006885+5.39222e-05	test-rmse:0.0077904+0.000384417
[1810]	train-rmse:0.0068652+5.35963e-05	test-rmse:0.0077742+0.000380398
[1820]	train-rmse:0.0068466+5.37721e-05	test-rmse:0.0077548+0.000378736
[1830]	train-rmse:0.0068276+5.41428e-05	test-rmse:0.0077396+0.000379317
[1840]	train-rmse:0.0068086+5.32601e-05	test-rmse:0.0077216+0.000380274
[1850]	train-rmse:0.00679+5.313e-05	test-rmse:0.0077034+0.000379243
[1860]	train-rmse:0.006771+5.24023e-05	test-rmse:0.0076842+0.000379615
[1870]	train-rmse:0.0067532+5.30298e-05	test-rmse:0.0076684+0.000374936
[1880]	train-rmse:0.0067348+5.27917e-05	test-rmse:0.0076538+0.000375111
[1890]	train-rmse:0.006717+5.26991e-05	test-rmse:0.0076394+0.000372807
[1900]	train-rmse:0.0066994+5.34775e-05	test-rmse:0.0076264+0.000370711
[1910]	train-rmse:0.0066814+5.36679e-05	test-rmse:0.0076112+0.000368617
[1920]	train-rmse:0.0066634+5.32038e-05	test-rmse:0.0075954+0.000369178
[1930]	train-rmse:0.0066466+5.32939e-05	test-rmse:0.0075796+0.00036875
[1940]	train-rmse:0.0066298+5.33457e-05	test-rmse:0.0075654+0.000367866
[1950]	train-rmse:0.0066126+5.38947e-05	test-rmse:0.007552+0.000365534
[1960]	train-rmse:0.0065956+5.41428e-05	test-rmse:0.0075368+0.000365313
[1970]	train-rmse:0.006579+5.4336e-05	test-rmse:0.0075212+0.000363203
[1980]	train-rmse:0.0065626+5.4017e-05	test-rmse:0.0075072+0.00036248
[1990]	train-rmse:0.0065456+5.37647e-05	test-rmse:0.0074948+0.000360384
[2000]	train-rmse:0.006529+5.40851e-05	test-rmse:0.0074794+0.000359412
[2010]	train-rmse:0.0065124+5.4054e-05	test-rmse:0.0074634+0.000360119
[2020]	train-rmse:0.0064962+5.38234e-05	test-rmse:0.0074478+0.000358425
[2030]	train-rmse:0.0064804+5.38724e-05	test-rmse:0.0074344+0.000356805
[2040]	train-rmse:0.0064652+5.36559e-05	test-rmse:0.0074248+0.000353832
[2050]	train-rmse:0.0064498+5.32969e-05	test-rmse:0.00741+0.000353727
[2060]	train-rmse:0.0064346+5.37572e-05	test-rmse:0.0073982+0.000353407
[2070]	train-rmse:0.0064192+5.31804e-05	test-rmse:0.0073828+0.000356769
[2080]	train-rmse:0.0064038+5.32105e-05	test-rmse:0.0073692+0.000357831
[2090]	train-rmse:0.0063884+5.33127e-05	test-rmse:0.007356+0.000360923
[2100]	train-rmse:0.0063738+5.35925e-05	test-rmse:0.0073446+0.000359553
[2110]	train-rmse:0.0063586+5.36753e-05	test-rmse:0.007331+0.000358761
[2120]	train-rmse:0.0063438+5.39793e-05	test-rmse:0.007316+0.000358519
[2130]	train-rmse:0.0063292+5.35291e-05	test-rmse:0.0073042+0.000357826
[2140]	train-rmse:0.0063148+5.37714e-05	test-rmse:0.0072928+0.000359183
[2150]	train-rmse:0.0063006+5.32564e-05	test-rmse:0.0072802+0.000357721
[2160]	train-rmse:0.0062864+5.31662e-05	test-rmse:0.0072692+0.000355265
[2170]	train-rmse:0.0062726+5.34326e-05	test-rmse:0.0072562+0.00035298
[2180]	train-rmse:0.0062582+5.3458e-05	test-rmse:0.007244+0.000352479
[2190]	train-rmse:0.0062442+5.3458e-05	test-rmse:0.0072314+0.000353658
[2200]	train-rmse:0.0062298+5.33119e-05	test-rmse:0.0072202+0.000353881
[2210]	train-rmse:0.0062156+5.30419e-05	test-rmse:0.0072076+0.000353022
[2220]	train-rmse:0.006202+5.33704e-05	test-rmse:0.007195+0.000352496
[2230]	train-rmse:0.0061884+5.29551e-05	test-rmse:0.007183+0.000350244
[2240]	train-rmse:0.0061748+5.2541e-05	test-rmse:0.0071722+0.000346945
[2250]	train-rmse:0.0061616+5.26255e-05	test-rmse:0.00716+0.000345483
[2260]	train-rmse:0.006148+5.19692e-05	test-rmse:0.0071472+0.000345045
[2270]	train-rmse:0.0061346+5.15542e-05	test-rmse:0.0071358+0.000343564
[2280]	train-rmse:0.006121+5.1482e-05	test-rmse:0.0071258+0.000345631
[2290]	train-rmse:0.0061084+5.12586e-05	test-rmse:0.0071146+0.000344385
[2300]	train-rmse:0.0060954+5.12586e-05	test-rmse:0.0071018+0.000344617
[2310]	train-rmse:0.0060818+5.08228e-05	test-rmse:0.00709+0.000345708
[2320]	train-rmse:0.0060688+5.10075e-05	test-rmse:0.0070802+0.000344767
[2330]	train-rmse:0.0060558+5.10075e-05	test-rmse:0.0070686+0.000344611
[2340]	train-rmse:0.0060434+5.12195e-05	test-rmse:0.0070608+0.0003442
[2350]	train-rmse:0.0060304+5.12195e-05	test-rmse:0.0070498+0.000343094
[2360]	train-rmse:0.006018+5.12601e-05	test-rmse:0.0070398+0.00034518
[2370]	train-rmse:0.006006+5.12601e-05	test-rmse:0.0070286+0.000343242
[2380]	train-rmse:0.005993+5.12601e-05	test-rmse:0.0070178+0.000346698
[2390]	train-rmse:0.0059808+5.05783e-05	test-rmse:0.0070058+0.000347347
[2400]	train-rmse:0.0059688+5.05783e-05	test-rmse:0.0069954+0.000347705
[2410]	train-rmse:0.005956+5.08409e-05	test-rmse:0.0069846+0.000349406
[2420]	train-rmse:0.005944+5.06636e-05	test-rmse:0.0069742+0.000349183
[2430]	train-rmse:0.0059322+5.13474e-05	test-rmse:0.0069628+0.000345272
[2440]	train-rmse:0.0059206+5.0918e-05	test-rmse:0.0069532+0.000347215
[2450]	train-rmse:0.0059088+5.07756e-05	test-rmse:0.0069436+0.000347645
[2460]	train-rmse:0.0058972+5.03444e-05	test-rmse:0.006931+0.000346597
[2470]	train-rmse:0.0058856+4.99143e-05	test-rmse:0.0069228+0.000346115
[2480]	train-rmse:0.005874+5.02792e-05	test-rmse:0.006912+0.000345423
[2490]	train-rmse:0.0058624+5.021e-05	test-rmse:0.0069052+0.000343875
[2500]	train-rmse:0.0058512+5.03444e-05	test-rmse:0.006895+0.000346873
[2510]	train-rmse:0.0058402+4.9757e-05	test-rmse:0.0068864+0.000345543
[2520]	train-rmse:0.005829+4.94894e-05	test-rmse:0.0068762+0.000344155
[2530]	train-rmse:0.005818+4.96749e-05	test-rmse:0.0068658+0.000345547
[2540]	train-rmse:0.005807+5.01238e-05	test-rmse:0.0068548+0.00034658
[2550]	train-rmse:0.0057956+4.99784e-05	test-rmse:0.0068472+0.000348006
[2560]	train-rmse:0.0057848+4.92804e-05	test-rmse:0.006838+0.000346952
[2570]	train-rmse:0.0057748+4.92804e-05	test-rmse:0.0068278+0.000346159
[2580]	train-rmse:0.0057636+4.88614e-05	test-rmse:0.0068198+0.000345292
[2590]	train-rmse:0.0057528+4.85032e-05	test-rmse:0.0068138+0.000342844
[2600]	train-rmse:0.005742+4.86128e-05	test-rmse:0.0068044+0.000346713
[2610]	train-rmse:0.0057312+4.81099e-05	test-rmse:0.0067942+0.000347274
[2620]	train-rmse:0.0057204+4.83678e-05	test-rmse:0.0067872+0.000346373
[2630]	train-rmse:0.0057096+4.79066e-05	test-rmse:0.0067784+0.000345832
[2640]	train-rmse:0.0056994+4.80358e-05	test-rmse:0.0067702+0.00034246
[2650]	train-rmse:0.0056888+4.78389e-05	test-rmse:0.0067618+0.000344728
[2660]	train-rmse:0.0056782+4.78264e-05	test-rmse:0.0067512+0.000346085
[2670]	train-rmse:0.0056682+4.72119e-05	test-rmse:0.0067432+0.000345246
[2680]	train-rmse:0.0056584+4.73312e-05	test-rmse:0.0067338+0.000346039
[2690]	train-rmse:0.005648+4.73455e-05	test-rmse:0.0067258+0.000345878
[2700]	train-rmse:0.0056374+4.68982e-05	test-rmse:0.0067178+0.00034488
[2710]	train-rmse:0.0056276+4.61978e-05	test-rmse:0.0067086+0.000345389
[2720]	train-rmse:0.0056178+4.65291e-05	test-rmse:0.0067004+0.000345373
[2730]	train-rmse:0.0056084+4.6723e-05	test-rmse:0.0066942+0.00034489
[2740]	train-rmse:0.0055986+4.64095e-05	test-rmse:0.0066834+0.000345716
[2750]	train-rmse:0.0055888+4.63569e-05	test-rmse:0.006674+0.000347892
[2760]	train-rmse:0.005579+4.6312e-05	test-rmse:0.0066662+0.000347746
[2770]	train-rmse:0.005569+4.64371e-05	test-rmse:0.0066586+0.000347499
[2780]	train-rmse:0.0055592+4.63957e-05	test-rmse:0.0066518+0.000347721
[2790]	train-rmse:0.0055496+4.59417e-05	test-rmse:0.0066444+0.000346145
[2800]	train-rmse:0.0055402+4.55693e-05	test-rmse:0.0066332+0.000345625
[2810]	train-rmse:0.00553+4.57165e-05	test-rmse:0.0066252+0.000345652
[2820]	train-rmse:0.0055206+4.53458e-05	test-rmse:0.0066164+0.000345158
[2830]	train-rmse:0.0055114+4.5535e-05	test-rmse:0.0066094+0.000346072
[2840]	train-rmse:0.005502+4.53299e-05	test-rmse:0.0066022+0.000346023
[2850]	train-rmse:0.005493+4.4775e-05	test-rmse:0.0065962+0.000347066
[2860]	train-rmse:0.0054838+4.45259e-05	test-rmse:0.0065902+0.000344943
[2870]	train-rmse:0.0054744+4.41706e-05	test-rmse:0.0065816+0.000343998
[2880]	train-rmse:0.005465+4.40772e-05	test-rmse:0.0065722+0.00034301
[2890]	train-rmse:0.0054562+4.42421e-05	test-rmse:0.006563+0.000342116
[2900]	train-rmse:0.0054472+4.44855e-05	test-rmse:0.006557+0.000342116
[2910]	train-rmse:0.0054382+4.39427e-05	test-rmse:0.006549+0.000341706
[2920]	train-rmse:0.0054298+4.36138e-05	test-rmse:0.00654+0.000339769
[2930]	train-rmse:0.005421+4.37493e-05	test-rmse:0.0065322+0.000338948
[2940]	train-rmse:0.0054124+4.35963e-05	test-rmse:0.0065276+0.000337738
[2950]	train-rmse:0.0054036+4.33663e-05	test-rmse:0.0065218+0.000339058
[2960]	train-rmse:0.0053952+4.30414e-05	test-rmse:0.0065158+0.00033932
[2970]	train-rmse:0.0053866+4.32463e-05	test-rmse:0.0065094+0.00033985
[2980]	train-rmse:0.0053784+4.33156e-05	test-rmse:0.0065052+0.00033683
[2990]	train-rmse:0.00537+4.37813e-05	test-rmse:0.0064982+0.000337783
[3000]	train-rmse:0.005362+4.36852e-05	test-rmse:0.0064924+0.000337457
[3010]	train-rmse:0.0053538+4.34345e-05	test-rmse:0.0064848+0.000338243
[3020]	train-rmse:0.0053452+4.32962e-05	test-rmse:0.0064786+0.000340176
[3030]	train-rmse:0.0053368+4.28831e-05	test-rmse:0.0064712+0.00034022
[3040]	train-rmse:0.0053288+4.28831e-05	test-rmse:0.0064636+0.000341188
[3050]	train-rmse:0.00532+4.31231e-05	test-rmse:0.0064574+0.000342673
[3060]	train-rmse:0.005312+4.31231e-05	test-rmse:0.0064502+0.000343006
[3070]	train-rmse:0.0053036+4.30748e-05	test-rmse:0.0064424+0.000343666
[3080]	train-rmse:0.005296+4.30395e-05	test-rmse:0.0064378+0.000343327
[3090]	train-rmse:0.0052878+4.27242e-05	test-rmse:0.0064314+0.000342772
[3100]	train-rmse:0.00528+4.25488e-05	test-rmse:0.0064256+0.000342675
[3110]	train-rmse:0.0052718+4.22441e-05	test-rmse:0.0064188+0.000343187
[3120]	train-rmse:0.0052638+4.20875e-05	test-rmse:0.006414+0.000345419
[3130]	train-rmse:0.005256+4.18808e-05	test-rmse:0.006408+0.000345497
[3140]	train-rmse:0.0052486+4.18502e-05	test-rmse:0.0064026+0.000345703
[3150]	train-rmse:0.0052408+4.194e-05	test-rmse:0.0063962+0.000345147
[3160]	train-rmse:0.0052328+4.18875e-05	test-rmse:0.0063912+0.000346362
[3170]	train-rmse:0.005225+4.21236e-05	test-rmse:0.0063856+0.000344485
[3180]	train-rmse:0.0052172+4.18254e-05	test-rmse:0.0063794+0.00034437
[3190]	train-rmse:0.0052094+4.1979e-05	test-rmse:0.0063728+0.000345083
[3200]	train-rmse:0.0052016+4.17162e-05	test-rmse:0.006367+0.000344732
[3210]	train-rmse:0.005194+4.20381e-05	test-rmse:0.0063612+0.000344079
[3220]	train-rmse:0.0051864+4.2349e-05	test-rmse:0.0063528+0.000345235
[3230]	train-rmse:0.0051788+4.27383e-05	test-rmse:0.0063464+0.000344043
[3240]	train-rmse:0.0051718+4.22488e-05	test-rmse:0.0063404+0.000345273
[3250]	train-rmse:0.0051644+4.21692e-05	test-rmse:0.0063332+0.000344964
[3260]	train-rmse:0.0051568+4.24895e-05	test-rmse:0.0063262+0.000343535
[3270]	train-rmse:0.0051496+4.22308e-05	test-rmse:0.0063204+0.000344273
[3280]	train-rmse:0.0051424+4.2136e-05	test-rmse:0.0063144+0.000343836
[3290]	train-rmse:0.005135+4.24876e-05	test-rmse:0.0063076+0.000344269
[3300]	train-rmse:0.0051276+4.22308e-05	test-rmse:0.0063022+0.000345857
[3310]	train-rmse:0.0051202+4.19113e-05	test-rmse:0.0062966+0.000346807
[3320]	train-rmse:0.005113+4.17277e-05	test-rmse:0.006291+0.000345596
[3330]	train-rmse:0.0051056+4.21455e-05	test-rmse:0.0062842+0.000348849
[3340]	train-rmse:0.0050984+4.19838e-05	test-rmse:0.0062776+0.000348864
[3350]	train-rmse:0.0050912+4.18254e-05	test-rmse:0.0062706+0.000351269
[3360]	train-rmse:0.0050842+4.17631e-05	test-rmse:0.006266+0.000350761
[3370]	train-rmse:0.0050772+4.23434e-05	test-rmse:0.0062604+0.000351214
[3380]	train-rmse:0.0050702+4.22156e-05	test-rmse:0.0062548+0.00035142
[3390]	train-rmse:0.0050632+4.22156e-05	test-rmse:0.0062472+0.000349779
[3400]	train-rmse:0.0050564+4.19695e-05	test-rmse:0.006244+0.000348882
[3410]	train-rmse:0.0050492+4.1811e-05	test-rmse:0.0062386+0.000347237
[3420]	train-rmse:0.0050424+4.20314e-05	test-rmse:0.0062334+0.000347343
[3430]	train-rmse:0.0050356+4.21312e-05	test-rmse:0.006227+0.000349225
[3440]	train-rmse:0.0050288+4.18922e-05	test-rmse:0.006221+0.000349235
[3450]	train-rmse:0.005022+4.16509e-05	test-rmse:0.0062148+0.000350533
[3460]	train-rmse:0.005015+4.16509e-05	test-rmse:0.00621+0.000352313
[3470]	train-rmse:0.0050086+4.20885e-05	test-rmse:0.0062046+0.000352535
[3480]	train-rmse:0.0050018+4.17823e-05	test-rmse:0.0061998+0.000353041
[3490]	train-rmse:0.0049952+4.17584e-05	test-rmse:0.0061954+0.000351953
[3500]	train-rmse:0.004989+4.20048e-05	test-rmse:0.006191+0.000349911
[3510]	train-rmse:0.0049826+4.2136e-05	test-rmse:0.0061874+0.000349217
[3520]	train-rmse:0.0049762+4.22298e-05	test-rmse:0.0061832+0.000350753
[3530]	train-rmse:0.00497+4.20048e-05	test-rmse:0.0061794+0.00035166
[3540]	train-rmse:0.0049638+4.22535e-05	test-rmse:0.0061764+0.000351904
[3550]	train-rmse:0.0049574+4.23821e-05	test-rmse:0.0061714+0.000351851
[3560]	train-rmse:0.0049506+4.24858e-05	test-rmse:0.006166+0.000352292
[3570]	train-rmse:0.0049444+4.23349e-05	test-rmse:0.0061608+0.000351997
[3580]	train-rmse:0.004938+4.24782e-05	test-rmse:0.0061568+0.00035125
[3590]	train-rmse:0.004932+4.24782e-05	test-rmse:0.0061522+0.000351022
[3600]	train-rmse:0.0049256+4.25046e-05	test-rmse:0.0061484+0.000351833
[3610]	train-rmse:0.0049194+4.24009e-05	test-rmse:0.0061446+0.00034957
[3620]	train-rmse:0.0049132+4.2654e-05	test-rmse:0.0061398+0.000348959
[3630]	train-rmse:0.004907+4.25018e-05	test-rmse:0.0061342+0.000350245
[3640]	train-rmse:0.0049006+4.25046e-05	test-rmse:0.0061302+0.000351218
[3650]	train-rmse:0.0048944+4.23537e-05	test-rmse:0.0061248+0.000350557
[3660]	train-rmse:0.0048884+4.22829e-05	test-rmse:0.0061194+0.000350261
[3670]	train-rmse:0.0048822+4.25366e-05	test-rmse:0.0061144+0.000351274
[3680]	train-rmse:0.0048762+4.20923e-05	test-rmse:0.0061094+0.000352472
[3690]	train-rmse:0.0048702+4.21587e-05	test-rmse:0.0061054+0.00035346
[3700]	train-rmse:0.0048642+4.20923e-05	test-rmse:0.0061014+0.000354437
[3710]	train-rmse:0.0048582+4.16817e-05	test-rmse:0.0060964+0.000352873
[3720]	train-rmse:0.0048524+4.17928e-05	test-rmse:0.006092+0.000353745
[3730]	train-rmse:0.004846+4.18999e-05	test-rmse:0.0060872+0.000352638
[3740]	train-rmse:0.0048404+4.22687e-05	test-rmse:0.0060828+0.000351819
[3750]	train-rmse:0.0048346+4.20124e-05	test-rmse:0.006081+0.000353008
[3760]	train-rmse:0.0048288+4.17536e-05	test-rmse:0.0060762+0.000350848
[3770]	train-rmse:0.0048226+4.20124e-05	test-rmse:0.006071+0.000350702
[3780]	train-rmse:0.0048166+4.19886e-05	test-rmse:0.0060662+0.000350069
[3790]	train-rmse:0.0048112+4.20923e-05	test-rmse:0.0060624+0.000349795
[3800]	train-rmse:0.0048052+4.20923e-05	test-rmse:0.0060576+0.000351153
[3810]	train-rmse:0.0047994+4.22497e-05	test-rmse:0.0060506+0.000352033
[3820]	train-rmse:0.004794+4.18903e-05	test-rmse:0.0060482+0.000352783
[3830]	train-rmse:0.0047886+4.16778e-05	test-rmse:0.0060438+0.000353754
[3840]	train-rmse:0.0047828+4.18014e-05	test-rmse:0.0060394+0.00035325
[3850]	train-rmse:0.0047776+4.20314e-05	test-rmse:0.006034+0.000354275
[3860]	train-rmse:0.0047722+4.21777e-05	test-rmse:0.0060296+0.000354568
[3870]	train-rmse:0.0047664+4.2297e-05	test-rmse:0.0060256+0.000355098
[3880]	train-rmse:0.004761+4.23462e-05	test-rmse:0.0060208+0.000356181
[3890]	train-rmse:0.004755+4.1938e-05	test-rmse:0.0060158+0.000356596
[3900]	train-rmse:0.0047496+4.24198e-05	test-rmse:0.0060096+0.000357035
[3910]	train-rmse:0.0047442+4.24989e-05	test-rmse:0.0060062+0.000356978
[3920]	train-rmse:0.0047386+4.24104e-05	test-rmse:0.0060022+0.000357546
[3930]	train-rmse:0.0047336+4.24104e-05	test-rmse:0.0059982+0.000359412
[3940]	train-rmse:0.0047278+4.25413e-05	test-rmse:0.0059928+0.000359707
[3950]	train-rmse:0.0047226+4.28093e-05	test-rmse:0.0059872+0.000362696
[3960]	train-rmse:0.004717+4.27598e-05	test-rmse:0.0059818+0.000365564
[3970]	train-rmse:0.0047114+4.32046e-05	test-rmse:0.005979+0.000364209
[3980]	train-rmse:0.0047066+4.33341e-05	test-rmse:0.0059742+0.00036292
[3990]	train-rmse:0.004701+4.35936e-05	test-rmse:0.005972+0.000363056
[4000]	train-rmse:0.0046956+4.33341e-05	test-rmse:0.0059678+0.000363768
[4010]	train-rmse:0.0046904+4.37017e-05	test-rmse:0.0059622+0.000363537
[4020]	train-rmse:0.0046856+4.38251e-05	test-rmse:0.0059576+0.00036368
[4030]	train-rmse:0.0046804+4.36971e-05	test-rmse:0.0059528+0.000363742
[4040]	train-rmse:0.004675+4.37858e-05	test-rmse:0.0059486+0.000362366
[4050]	train-rmse:0.0046698+4.36596e-05	test-rmse:0.0059442+0.000362269
[4060]	train-rmse:0.0046646+4.38753e-05	test-rmse:0.0059418+0.000361973
[4070]	train-rmse:0.0046594+4.41389e-05	test-rmse:0.0059394+0.000362214
[4080]	train-rmse:0.0046546+4.43694e-05	test-rmse:0.0059346+0.000362878
[4090]	train-rmse:0.0046498+4.41062e-05	test-rmse:0.00593+0.000361285
[4100]	train-rmse:0.0046452+4.40155e-05	test-rmse:0.0059278+0.000362241
[4110]	train-rmse:0.0046398+4.41561e-05	test-rmse:0.0059238+0.000362498
[4120]	train-rmse:0.004635+4.38908e-05	test-rmse:0.0059206+0.000363017
[4130]	train-rmse:0.0046298+4.37694e-05	test-rmse:0.0059172+0.000360747
[4140]	train-rmse:0.0046252+4.41244e-05	test-rmse:0.0059154+0.000360534
[4150]	train-rmse:0.00462+4.40045e-05	test-rmse:0.0059116+0.000362125
[4160]	train-rmse:0.004615+4.40045e-05	test-rmse:0.0059084+0.000359821
[4170]	train-rmse:0.0046104+4.39163e-05	test-rmse:0.0059052+0.000359737
[4180]	train-rmse:0.0046054+4.39163e-05	test-rmse:0.0059012+0.000359827
[4190]	train-rmse:0.0046004+4.39163e-05	test-rmse:0.0058978+0.000360005
[4200]	train-rmse:0.0045956+4.40935e-05	test-rmse:0.0058934+0.000361296
[4210]	train-rmse:0.0045908+4.43279e-05	test-rmse:0.0058904+0.000360187
[4220]	train-rmse:0.0045862+4.45708e-05	test-rmse:0.0058858+0.000361589
[4230]	train-rmse:0.0045814+4.48045e-05	test-rmse:0.0058814+0.000362599
[4240]	train-rmse:0.0045766+4.45358e-05	test-rmse:0.0058774+0.000361434
[4250]	train-rmse:0.0045718+4.4656e-05	test-rmse:0.0058758+0.000361306
[4260]	train-rmse:0.0045674+4.48045e-05	test-rmse:0.0058726+0.000360725
[4270]	train-rmse:0.0045624+4.43649e-05	test-rmse:0.0058678+0.000360021
[4280]	train-rmse:0.0045576+4.40935e-05	test-rmse:0.0058632+0.000360634
[4290]	train-rmse:0.0045526+4.44279e-05	test-rmse:0.005859+0.000362287
[4300]	train-rmse:0.0045484+4.48134e-05	test-rmse:0.0058566+0.000361951
[4310]	train-rmse:0.0045438+4.50573e-05	test-rmse:0.0058524+0.000363142
[4320]	train-rmse:0.0045388+4.50573e-05	test-rmse:0.0058486+0.000362112
[4330]	train-rmse:0.0045346+4.49293e-05	test-rmse:0.005844+0.000362574
[4340]	train-rmse:0.0045298+4.50573e-05	test-rmse:0.0058412+0.000362977
[4350]	train-rmse:0.0045252+4.50839e-05	test-rmse:0.005837+0.000361888
[4360]	train-rmse:0.0045206+4.49426e-05	test-rmse:0.0058338+0.000362473
[4370]	train-rmse:0.0045162+4.45798e-05	test-rmse:0.0058322+0.000359915
[4380]	train-rmse:0.0045118+4.50573e-05	test-rmse:0.0058276+0.000359149
[4390]	train-rmse:0.0045076+4.53281e-05	test-rmse:0.0058246+0.000360977
[4400]	train-rmse:0.004503+4.52813e-05	test-rmse:0.0058222+0.000359701
[4410]	train-rmse:0.0044986+4.49293e-05	test-rmse:0.0058198+0.00035769
[4420]	train-rmse:0.0044942+4.50218e-05	test-rmse:0.0058172+0.000357996
[4430]	train-rmse:0.0044896+4.49293e-05	test-rmse:0.0058146+0.000357683
[4440]	train-rmse:0.0044854+4.48134e-05	test-rmse:0.0058106+0.000360415
[4450]	train-rmse:0.0044806+4.50493e-05	test-rmse:0.0058102+0.000361173
[4460]	train-rmse:0.0044762+4.50218e-05	test-rmse:0.005806+0.000361012
[4470]	train-rmse:0.0044724+4.52575e-05	test-rmse:0.0058022+0.000359379
[4480]	train-rmse:0.0044678+4.56219e-05	test-rmse:0.005799+0.000360699
[4490]	train-rmse:0.0044634+4.52752e-05	test-rmse:0.0057944+0.000359855
[4500]	train-rmse:0.004459+4.57515e-05	test-rmse:0.005792+0.000360628
[4510]	train-rmse:0.004455+4.57515e-05	test-rmse:0.0057896+0.000359194
[4520]	train-rmse:0.0044502+4.58624e-05	test-rmse:0.005787+0.000359812
[4530]	train-rmse:0.0044464+4.55965e-05	test-rmse:0.0057846+0.000359445
[4540]	train-rmse:0.004442+4.57515e-05	test-rmse:0.0057816+0.000359374
[4550]	train-rmse:0.0044382+4.55956e-05	test-rmse:0.0057786+0.000359276
[4560]	train-rmse:0.0044338+4.59452e-05	test-rmse:0.0057754+0.000358977
[4570]	train-rmse:0.0044296+4.58327e-05	test-rmse:0.0057712+0.000360381
[4580]	train-rmse:0.0044254+4.5706e-05	test-rmse:0.0057666+0.000360973
[4590]	train-rmse:0.004421+4.53564e-05	test-rmse:0.0057628+0.000360312
[4600]	train-rmse:0.0044174+4.52221e-05	test-rmse:0.00576+0.000360621
[4610]	train-rmse:0.0044136+4.54603e-05	test-rmse:0.0057562+0.000360226
[4620]	train-rmse:0.0044096+4.54603e-05	test-rmse:0.005753+0.000360515
[4630]	train-rmse:0.0044058+4.55912e-05	test-rmse:0.0057496+0.000360891
[4640]	train-rmse:0.0044014+4.52221e-05	test-rmse:0.0057476+0.00036134
[4650]	train-rmse:0.0043972+4.54374e-05	test-rmse:0.0057456+0.000361493
[4660]	train-rmse:0.0043932+4.54374e-05	test-rmse:0.0057436+0.00036277
[4670]	train-rmse:0.0043892+4.55693e-05	test-rmse:0.0057418+0.000361988
[4680]	train-rmse:0.004385+4.54401e-05	test-rmse:0.005739+0.00036251
[4690]	train-rmse:0.0043812+4.52257e-05	test-rmse:0.005738+0.000360777
[4700]	train-rmse:0.0043772+4.52257e-05	test-rmse:0.0057346+0.00036147
[4710]	train-rmse:0.004373+4.54401e-05	test-rmse:0.0057316+0.000360731
[4720]	train-rmse:0.004369+4.54885e-05	test-rmse:0.0057288+0.000361057
[4730]	train-rmse:0.004365+4.59957e-05	test-rmse:0.0057268+0.000360164
[4740]	train-rmse:0.0043612+4.57882e-05	test-rmse:0.0057232+0.000358902
[4750]	train-rmse:0.004357+4.59957e-05	test-rmse:0.0057206+0.000359767
[4760]	train-rmse:0.004353+4.55456e-05	test-rmse:0.0057178+0.00036008
[4770]	train-rmse:0.0043494+4.54075e-05	test-rmse:0.0057158+0.000361562
[4780]	train-rmse:0.0043456+4.55394e-05	test-rmse:0.0057136+0.000363384
[4790]	train-rmse:0.0043418+4.62878e-05	test-rmse:0.0057118+0.00036243
[4800]	train-rmse:0.0043376+4.64956e-05	test-rmse:0.0057084+0.000360346
[4810]	train-rmse:0.0043338+4.62878e-05	test-rmse:0.0057056+0.000360532
[4820]	train-rmse:0.0043298+4.62878e-05	test-rmse:0.0057018+0.000360446
[4830]	train-rmse:0.004326+4.64198e-05	test-rmse:0.0056986+0.000360003
[4840]	train-rmse:0.0043222+4.65549e-05	test-rmse:0.0056962+0.000359735
[4850]	train-rmse:0.0043184+4.63491e-05	test-rmse:0.0056944+0.000360338
[4860]	train-rmse:0.0043146+4.65901e-05	test-rmse:0.0056924+0.000360219
[4870]	train-rmse:0.0043108+4.63871e-05	test-rmse:0.0056892+0.000359481
[4880]	train-rmse:0.0043066+4.65901e-05	test-rmse:0.0056866+0.000360346
[4890]	train-rmse:0.0043034+4.70089e-05	test-rmse:0.0056846+0.000359991
[4900]	train-rmse:0.0042992+4.67649e-05	test-rmse:0.0056828+0.000359685
[4910]	train-rmse:0.0042954+4.69025e-05	test-rmse:0.0056798+0.000360097
[4920]	train-rmse:0.0042918+4.67949e-05	test-rmse:0.0056756+0.000359861
[4930]	train-rmse:0.0042884+4.69025e-05	test-rmse:0.0056742+0.000359085
[4940]	train-rmse:0.0042848+4.69442e-05	test-rmse:0.005671+0.000358312
[4950]	train-rmse:0.004281+4.70829e-05	test-rmse:0.0056688+0.000359655
[4960]	train-rmse:0.0042774+4.69749e-05	test-rmse:0.0056692+0.000359593
[4970]	train-rmse:0.0042736+4.71152e-05	test-rmse:0.0056656+0.000359951
[4980]	train-rmse:0.00427+4.70106e-05	test-rmse:0.0056626+0.000358824
[4990]	train-rmse:0.0042664+4.66888e-05	test-rmse:0.0056596+0.000358519
[5000]	train-rmse:0.0042626+4.67786e-05	test-rmse:0.0056576+0.000358996
[5010]	train-rmse:0.0042588+4.6585e-05	test-rmse:0.0056552+0.000360048
[5020]	train-rmse:0.0042554+4.65386e-05	test-rmse:0.0056542+0.000358344
[5030]	train-rmse:0.0042518+4.6585e-05	test-rmse:0.0056492+0.000359647
[5040]	train-rmse:0.0042486+4.68641e-05	test-rmse:0.0056468+0.000358853
[5050]	train-rmse:0.0042448+4.6585e-05	test-rmse:0.0056446+0.0003585
[5060]	train-rmse:0.0042414+4.70557e-05	test-rmse:0.0056432+0.000356523
[5070]	train-rmse:0.0042378+4.6585e-05	test-rmse:0.0056396+0.000357316
[5080]	train-rmse:0.0042342+4.64861e-05	test-rmse:0.0056376+0.000358254
[5090]	train-rmse:0.0042306+4.68641e-05	test-rmse:0.005635+0.000358647
[5100]	train-rmse:0.0042274+4.70557e-05	test-rmse:0.0056316+0.000357361
[5110]	train-rmse:0.0042234+4.70557e-05	test-rmse:0.00563+0.000356951
[5120]	train-rmse:0.0042202+4.69655e-05	test-rmse:0.0056266+0.000358014
[5130]	train-rmse:0.0042166+4.68641e-05	test-rmse:0.005623+0.000359409
[5140]	train-rmse:0.0042132+4.69655e-05	test-rmse:0.0056204+0.000360734
[5150]	train-rmse:0.00421+4.71593e-05	test-rmse:0.0056182+0.000359273
[5160]	train-rmse:0.0042062+4.69655e-05	test-rmse:0.0056132+0.000361646
[5170]	train-rmse:0.0042026+4.73481e-05	test-rmse:0.0056096+0.000361827
[5180]	train-rmse:0.0041992+4.74021e-05	test-rmse:0.0056064+0.000361073
[5190]	train-rmse:0.0041956+4.73481e-05	test-rmse:0.005604+0.000362154
[5200]	train-rmse:0.004192+4.71593e-05	test-rmse:0.0056018+0.00036187
[5210]	train-rmse:0.0041886+4.69323e-05	test-rmse:0.0055978+0.000362353
[5220]	train-rmse:0.0041848+4.70719e-05	test-rmse:0.005594+0.000362327
[5230]	train-rmse:0.0041818+4.66579e-05	test-rmse:0.0055926+0.000359914
[5240]	train-rmse:0.0041786+4.69323e-05	test-rmse:0.0055908+0.00035969
[5250]	train-rmse:0.0041752+4.68845e-05	test-rmse:0.0055902+0.000359474
[5260]	train-rmse:0.004172+4.67461e-05	test-rmse:0.0055874+0.000359503
[5270]	train-rmse:0.0041688+4.70208e-05	test-rmse:0.005584+0.000359956
[5280]	train-rmse:0.0041656+4.72169e-05	test-rmse:0.005582+0.000358712
[5290]	train-rmse:0.0041622+4.74021e-05	test-rmse:0.0055794+0.00036027
[5300]	train-rmse:0.004159+4.72652e-05	test-rmse:0.0055766+0.000360325
[5310]	train-rmse:0.0041556+4.72974e-05	test-rmse:0.0055742+0.000359756
[5320]	train-rmse:0.0041522+4.74021e-05	test-rmse:0.0055722+0.000359676
[5330]	train-rmse:0.0041492+4.74021e-05	test-rmse:0.0055708+0.000361197
[5340]	train-rmse:0.004146+4.75983e-05	test-rmse:0.0055692+0.000360365
[5350]	train-rmse:0.0041428+4.75117e-05	test-rmse:0.005568+0.00036057
[5360]	train-rmse:0.0041396+4.72677e-05	test-rmse:0.0055668+0.000358601
[5370]	train-rmse:0.0041362+4.77385e-05	test-rmse:0.0055634+0.000358496
[5380]	train-rmse:0.004133+4.79375e-05	test-rmse:0.0055614+0.000359207
[5390]	train-rmse:0.0041294+4.79817e-05	test-rmse:0.0055594+0.000359666
[5400]	train-rmse:0.0041262+4.77385e-05	test-rmse:0.0055564+0.000358742
[5410]	train-rmse:0.0041232+4.78974e-05	test-rmse:0.0055538+0.000356599
[5420]	train-rmse:0.0041202+4.78974e-05	test-rmse:0.005553+0.00035737
[5430]	train-rmse:0.0041174+4.8144e-05	test-rmse:0.0055512+0.000358762
[5440]	train-rmse:0.0041142+4.78974e-05	test-rmse:0.0055498+0.000357325
[5450]	train-rmse:0.004111+4.77577e-05	test-rmse:0.0055468+0.000357391
[5460]	train-rmse:0.0041078+4.75117e-05	test-rmse:0.0055464+0.00035685
[5470]	train-rmse:0.0041046+4.77854e-05	test-rmse:0.0055442+0.000356763
[5480]	train-rmse:0.0041014+4.79817e-05	test-rmse:0.0055416+0.000356512
[5490]	train-rmse:0.0040982+4.78974e-05	test-rmse:0.0055388+0.000357343
[5500]	train-rmse:0.004095+4.77577e-05	test-rmse:0.0055358+0.000355092
[5510]	train-rmse:0.0040922+4.78974e-05	test-rmse:0.0055352+0.000356139
[5520]	train-rmse:0.004089+4.81705e-05	test-rmse:0.0055326+0.000356464
[5530]	train-rmse:0.0040862+4.83131e-05	test-rmse:0.0055308+0.000356192
[5540]	train-rmse:0.004083+4.81705e-05	test-rmse:0.0055288+0.000354568
[5550]	train-rmse:0.0040802+4.83131e-05	test-rmse:0.0055268+0.000355028
[5560]	train-rmse:0.0040772+4.83131e-05	test-rmse:0.005526+0.000353106
[5570]	train-rmse:0.0040742+4.83131e-05	test-rmse:0.0055236+0.000352714
[5580]	train-rmse:0.004071+4.81705e-05	test-rmse:0.0055214+0.000354057
[5590]	train-rmse:0.0040678+4.84454e-05	test-rmse:0.0055192+0.00035564
[5600]	train-rmse:0.0040646+4.86399e-05	test-rmse:0.0055162+0.000357424
[5610]	train-rmse:0.004061+4.81705e-05	test-rmse:0.0055118+0.000357995
[5620]	train-rmse:0.004058+4.81705e-05	test-rmse:0.005511+0.000357797
[5630]	train-rmse:0.0040552+4.84165e-05	test-rmse:0.0055066+0.000358633
[5640]	train-rmse:0.0040522+4.84165e-05	test-rmse:0.0055042+0.000358615
[5650]	train-rmse:0.004049+4.86909e-05	test-rmse:0.005502+0.000357334
[5660]	train-rmse:0.0040458+4.85527e-05	test-rmse:0.0055004+0.000357725
[5670]	train-rmse:0.004043+4.86909e-05	test-rmse:0.0054972+0.000357137
[5680]	train-rmse:0.0040402+4.89383e-05	test-rmse:0.005496+0.000355934
[5690]	train-rmse:0.0040374+4.86646e-05	test-rmse:0.0054948+0.000355522
[5700]	train-rmse:0.0040344+4.86646e-05	test-rmse:0.0054934+0.000354903
[5710]	train-rmse:0.0040314+4.86646e-05	test-rmse:0.00549+0.000354631
[5720]	train-rmse:0.004028+4.91325e-05	test-rmse:0.0054888+0.00035474
[5730]	train-rmse:0.0040252+4.92723e-05	test-rmse:0.0054856+0.000354247
[5740]	train-rmse:0.004022+4.90265e-05	test-rmse:0.0054848+0.000355032
[5750]	train-rmse:0.004019+4.95459e-05	test-rmse:0.0054836+0.000354241
[5760]	train-rmse:0.0040162+4.92723e-05	test-rmse:0.005481+0.000354972
[5770]	train-rmse:0.0040134+4.94959e-05	test-rmse:0.0054794+0.000354352
[5780]	train-rmse:0.0040108+4.93656e-05	test-rmse:0.005478+0.000354473
[5790]	train-rmse:0.004008+4.94449e-05	test-rmse:0.0054766+0.000355727
[5800]	train-rmse:0.004005+4.94449e-05	test-rmse:0.005474+0.000355294
[5810]	train-rmse:0.0040026+4.96411e-05	test-rmse:0.0054718+0.000355345
[5820]	train-rmse:0.0039998+4.97208e-05	test-rmse:0.0054696+0.000356214
[5830]	train-rmse:0.0039972+4.95919e-05	test-rmse:0.0054676+0.000356853
[5840]	train-rmse:0.0039944+4.93988e-05	test-rmse:0.0054666+0.000354506
[5850]	train-rmse:0.0039914+4.96733e-05	test-rmse:0.0054652+0.000352567
[5860]	train-rmse:0.0039882+4.95919e-05	test-rmse:0.0054622+0.00035297
[5870]	train-rmse:0.003986+4.95136e-05	test-rmse:0.0054598+0.000354417
[5880]	train-rmse:0.0039828+4.97088e-05	test-rmse:0.0054584+0.000354344
[5890]	train-rmse:0.0039802+4.96645e-05	test-rmse:0.005455+0.000355519
[5900]	train-rmse:0.0039776+4.95443e-05	test-rmse:0.0054542+0.000355135
[5910]	train-rmse:0.0039746+4.95443e-05	test-rmse:0.0054524+0.000355146
[5920]	train-rmse:0.0039716+4.95443e-05	test-rmse:0.0054498+0.00035613
[5930]	train-rmse:0.003969+4.95136e-05	test-rmse:0.0054486+0.000355078
[5940]	train-rmse:0.0039664+4.94716e-05	test-rmse:0.0054452+0.000355071
[5950]	train-rmse:0.0039638+4.95193e-05	test-rmse:0.005444+0.000355019
[5960]	train-rmse:0.0039606+4.92772e-05	test-rmse:0.005442+0.000355485
[5970]	train-rmse:0.003958+4.93275e-05	test-rmse:0.0054396+0.000354935
[5980]	train-rmse:0.0039554+4.92934e-05	test-rmse:0.0054386+0.000355261
[5990]	train-rmse:0.0039526+4.93623e-05	test-rmse:0.0054356+0.000354783
[6000]	train-rmse:0.0039496+4.93623e-05	test-rmse:0.0054334+0.000355299
[6010]	train-rmse:0.0039468+4.9604e-05	test-rmse:0.0054308+0.000356572
[6020]	train-rmse:0.0039446+4.93623e-05	test-rmse:0.0054294+0.000357216
[6030]	train-rmse:0.0039416+4.93623e-05	test-rmse:0.0054276+0.000358341
[6040]	train-rmse:0.0039388+4.94344e-05	test-rmse:0.0054256+0.000359114
[6050]	train-rmse:0.003936+4.96749e-05	test-rmse:0.0054234+0.000360161
[6060]	train-rmse:0.003933+4.96749e-05	test-rmse:0.0054222+0.00035869
[6070]	train-rmse:0.0039308+4.99536e-05	test-rmse:0.0054208+0.000357566
[6080]	train-rmse:0.0039282+4.98293e-05	test-rmse:0.005419+0.000358334
[6090]	train-rmse:0.0039252+4.98293e-05	test-rmse:0.005417+0.000359158
[6100]	train-rmse:0.0039226+5.01422e-05	test-rmse:0.0054138+0.000359427
[6110]	train-rmse:0.0039204+5.00704e-05	test-rmse:0.0054134+0.00035993
[6120]	train-rmse:0.0039176+5.0488e-05	test-rmse:0.0054112+0.000361138
[6130]	train-rmse:0.0039152+5.06928e-05	test-rmse:0.0054094+0.000360069
[6140]	train-rmse:0.0039126+5.05751e-05	test-rmse:0.0054084+0.000357833
[6150]	train-rmse:0.0039102+5.02649e-05	test-rmse:0.0054062+0.000357762
[6160]	train-rmse:0.0039074+5.05039e-05	test-rmse:0.0054044+0.000359809
[6170]	train-rmse:0.0039048+5.04674e-05	test-rmse:0.0054024+0.000359045
[6180]	train-rmse:0.0039024+5.05039e-05	test-rmse:0.0054006+0.000359553
[6190]	train-rmse:0.0038998+5.04674e-05	test-rmse:0.0054002+0.000358628
[6200]	train-rmse:0.0038972+5.00016e-05	test-rmse:0.0053988+0.000356867
[6210]	train-rmse:0.0038948+5.04674e-05	test-rmse:0.0053968+0.000356662
[6220]	train-rmse:0.0038922+5.03484e-05	test-rmse:0.0053962+0.00035535
[6230]	train-rmse:0.0038896+5.07449e-05	test-rmse:0.0053936+0.00035555
[6240]	train-rmse:0.003887+5.06281e-05	test-rmse:0.0053924+0.000355496
[6250]	train-rmse:0.0038844+5.05909e-05	test-rmse:0.00539+0.000356743
[6260]	train-rmse:0.0038816+5.07449e-05	test-rmse:0.0053884+0.000355815
[6270]	train-rmse:0.0038794+5.10239e-05	test-rmse:0.0053862+0.000355783
[6280]	train-rmse:0.0038766+5.07449e-05	test-rmse:0.0053846+0.000356396
[6290]	train-rmse:0.0038736+5.07449e-05	test-rmse:0.0053842+0.000356929
[6300]	train-rmse:0.0038714+5.05909e-05	test-rmse:0.005383+0.000359471
[6310]	train-rmse:0.0038686+5.0662e-05	test-rmse:0.0053804+0.00035919
[6320]	train-rmse:0.003866+5.06281e-05	test-rmse:0.0053788+0.000358849
[6330]	train-rmse:0.003864+5.06281e-05	test-rmse:0.0053776+0.000357665
[6340]	train-rmse:0.0038612+5.07835e-05	test-rmse:0.0053766+0.000357699
[6350]	train-rmse:0.003859+5.06281e-05	test-rmse:0.0053754+0.000356878
[6360]	train-rmse:0.0038562+5.04436e-05	test-rmse:0.005373+0.000357718
[6370]	train-rmse:0.0038538+5.03881e-05	test-rmse:0.005372+0.000357408
[6380]	train-rmse:0.0038514+5.05988e-05	test-rmse:0.0053676+0.00035889
[6390]	train-rmse:0.0038492+5.04436e-05	test-rmse:0.0053664+0.000358745
[6400]	train-rmse:0.0038466+5.06699e-05	test-rmse:0.0053636+0.000360056
[6410]	train-rmse:0.003844+5.02036e-05	test-rmse:0.0053606+0.000360257
[6420]	train-rmse:0.0038418+5.01334e-05	test-rmse:0.0053602+0.000359763
[6430]	train-rmse:0.0038392+5.03603e-05	test-rmse:0.0053586+0.00035909
[6440]	train-rmse:0.003837+5.02036e-05	test-rmse:0.0053568+0.000359014
[6450]	train-rmse:0.0038342+5.03603e-05	test-rmse:0.0053546+0.000358834
[6460]	train-rmse:0.0038318+5.04872e-05	test-rmse:0.0053544+0.000358095
[6470]	train-rmse:0.0038296+5.06699e-05	test-rmse:0.0053532+0.000357405
[6480]	train-rmse:0.0038268+5.07441e-05	test-rmse:0.0053502+0.000357489
[6490]	train-rmse:0.0038246+5.06699e-05	test-rmse:0.0053492+0.000358088
[6500]	train-rmse:0.003822+5.10647e-05	test-rmse:0.0053466+0.000358238
[6510]	train-rmse:0.0038198+5.08268e-05	test-rmse:0.0053444+0.000356194
[6520]	train-rmse:0.0038174+5.05197e-05	test-rmse:0.0053438+0.000356544
[6530]	train-rmse:0.003815+5.0545e-05	test-rmse:0.0053424+0.000357599
[6540]	train-rmse:0.0038126+5.05909e-05	test-rmse:0.0053402+0.000358772
[6550]	train-rmse:0.0038102+5.03603e-05	test-rmse:0.0053384+0.0003598
[6560]	train-rmse:0.0038078+4.99656e-05	test-rmse:0.0053366+0.000360488
[6570]	train-rmse:0.0038054+5.04325e-05	test-rmse:0.005335+0.000359999
[6580]	train-rmse:0.003803+5.02036e-05	test-rmse:0.0053336+0.000358888
[6590]	train-rmse:0.0038006+5.02498e-05	test-rmse:0.0053328+0.000358394
[6600]	train-rmse:0.0037984+5.01781e-05	test-rmse:0.0053304+0.000358328
[6610]	train-rmse:0.003796+4.98718e-05	test-rmse:0.0053288+0.000358135
[6620]	train-rmse:0.0037934+5.01781e-05	test-rmse:0.0053278+0.000357288
[6630]	train-rmse:0.0037912+5.04635e-05	test-rmse:0.0053268+0.000358684
[6640]	train-rmse:0.003789+5.02275e-05	test-rmse:0.0053252+0.000358326
[6650]	train-rmse:0.0037866+4.98341e-05	test-rmse:0.0053232+0.000358662
[6660]	train-rmse:0.0037842+5.03007e-05	test-rmse:0.0053218+0.000359456
[6670]	train-rmse:0.003782+5.02275e-05	test-rmse:0.0053194+0.000359354
[6680]	train-rmse:0.0037798+5.00696e-05	test-rmse:0.0053182+0.000358194
[6690]	train-rmse:0.0037776+5.03571e-05	test-rmse:0.0053174+0.000356601
[6700]	train-rmse:0.0037754+5.05355e-05	test-rmse:0.0053158+0.000358302
[6710]	train-rmse:0.0037734+5.10592e-05	test-rmse:0.0053138+0.000357669
[6720]	train-rmse:0.0037712+5.08228e-05	test-rmse:0.0053102+0.000357884
[6730]	train-rmse:0.003769+5.07504e-05	test-rmse:0.0053082+0.000357158
[6740]	train-rmse:0.0037664+5.10592e-05	test-rmse:0.0053068+0.000357321
[6750]	train-rmse:0.0037644+5.09808e-05	test-rmse:0.0053054+0.000355903
[6760]	train-rmse:0.0037624+5.09808e-05	test-rmse:0.005303+0.000356396
[6770]	train-rmse:0.0037604+5.09808e-05	test-rmse:0.0053006+0.000357087
[6780]	train-rmse:0.003758+5.11116e-05	test-rmse:0.0052986+0.000357101
[6790]	train-rmse:0.0037558+5.10388e-05	test-rmse:0.0052978+0.000357448
[6800]	train-rmse:0.0037534+5.06462e-05	test-rmse:0.005297+0.000358405
[6810]	train-rmse:0.003751+5.11116e-05	test-rmse:0.0052958+0.000358267
[6820]	train-rmse:0.0037492+5.09368e-05	test-rmse:0.0052946+0.000357478
[6830]	train-rmse:0.003747+5.08645e-05	test-rmse:0.0052932+0.0003581
[6840]	train-rmse:0.0037444+5.11726e-05	test-rmse:0.0052912+0.00035805
[6850]	train-rmse:0.0037424+5.11726e-05	test-rmse:0.0052892+0.000357798
[6860]	train-rmse:0.0037402+5.13474e-05	test-rmse:0.0052882+0.000356782
[6870]	train-rmse:0.003738+5.11937e-05	test-rmse:0.005288+0.000357589
[6880]	train-rmse:0.0037356+5.14105e-05	test-rmse:0.0052864+0.000357523
[6890]	train-rmse:0.0037336+5.14105e-05	test-rmse:0.0052852+0.000356836
[6900]	train-rmse:0.0037316+5.14105e-05	test-rmse:0.0052838+0.000356425
[6910]	train-rmse:0.0037292+5.18745e-05	test-rmse:0.0052828+0.000357658
[6920]	train-rmse:0.0037274+5.15853e-05	test-rmse:0.0052802+0.000356787
[6930]	train-rmse:0.0037252+5.18745e-05	test-rmse:0.0052794+0.000355629
[6940]	train-rmse:0.0037226+5.16589e-05	test-rmse:0.0052792+0.000355451
[6950]	train-rmse:0.0037206+5.16589e-05	test-rmse:0.0052776+0.000354633
[6960]	train-rmse:0.0037182+5.17123e-05	test-rmse:0.0052766+0.000355124
[6970]	train-rmse:0.003716+5.16372e-05	test-rmse:0.0052762+0.000354682
[6980]	train-rmse:0.003714+5.16372e-05	test-rmse:0.0052756+0.000353894
[6990]	train-rmse:0.0037122+5.17123e-05	test-rmse:0.0052746+0.000354625
[7000]	train-rmse:0.00371+5.16372e-05	test-rmse:0.0052734+0.00035547
[7010]	train-rmse:0.0037078+5.14836e-05	test-rmse:0.0052714+0.000356175
[7020]	train-rmse:0.003706+5.13108e-05	test-rmse:0.0052706+0.000355982
[7030]	train-rmse:0.0037036+5.17749e-05	test-rmse:0.0052684+0.000356145
[7040]	train-rmse:0.0037014+5.15387e-05	test-rmse:0.0052664+0.000355184
[7050]	train-rmse:0.0036994+5.12156e-05	test-rmse:0.0052654+0.000355184
[7060]	train-rmse:0.0036974+5.15387e-05	test-rmse:0.005263+0.000355807
[7070]	train-rmse:0.0036956+5.08394e-05	test-rmse:0.005261+0.00035609
[7080]	train-rmse:0.0036932+5.13046e-05	test-rmse:0.005261+0.000354914
[7090]	train-rmse:0.0036916+5.12117e-05	test-rmse:0.0052594+0.000355921
[7100]	train-rmse:0.0036894+5.13833e-05	test-rmse:0.0052572+0.0003566
[7110]	train-rmse:0.0036874+5.13833e-05	test-rmse:0.005256+0.000356978
[7120]	train-rmse:0.0036854+5.14611e-05	test-rmse:0.0052556+0.000355365
[7130]	train-rmse:0.0036828+5.13669e-05	test-rmse:0.0052536+0.000355542
[7140]	train-rmse:0.0036808+5.16968e-05	test-rmse:0.0052506+0.000356401
[7150]	train-rmse:0.0036792+5.14836e-05	test-rmse:0.0052506+0.000356899
[7160]	train-rmse:0.0036768+5.12461e-05	test-rmse:0.0052498+0.000356967
[7170]	train-rmse:0.0036746+5.12742e-05	test-rmse:0.0052478+0.000356416
[7180]	train-rmse:0.0036724+5.12664e-05	test-rmse:0.0052474+0.000356014
[7190]	train-rmse:0.0036704+5.11922e-05	test-rmse:0.0052464+0.00035579
[7200]	train-rmse:0.0036684+5.11922e-05	test-rmse:0.0052446+0.000356045
[7210]	train-rmse:0.0036666+5.10161e-05	test-rmse:0.0052432+0.000356284
[7220]	train-rmse:0.0036642+5.10349e-05	test-rmse:0.0052418+0.000355721
[7230]	train-rmse:0.0036624+5.0859e-05	test-rmse:0.0052406+0.000356469
[7240]	train-rmse:0.0036606+5.12742e-05	test-rmse:0.0052384+0.000356266
[7250]	train-rmse:0.0036586+5.1391e-05	test-rmse:0.005238+0.000356101
[7260]	train-rmse:0.0036566+5.14222e-05	test-rmse:0.0052374+0.000357058
[7270]	train-rmse:0.0036548+5.1503e-05	test-rmse:0.0052358+0.00035614
[7280]	train-rmse:0.0036528+5.1503e-05	test-rmse:0.0052334+0.000357008
[7290]	train-rmse:0.0036508+5.1503e-05	test-rmse:0.005232+0.000357459
[7300]	train-rmse:0.0036488+5.1503e-05	test-rmse:0.005232+0.000357721
[7310]	train-rmse:0.0036468+5.1503e-05	test-rmse:0.0052312+0.000357135
[7320]	train-rmse:0.0036446+5.12742e-05	test-rmse:0.0052296+0.000358109
[7330]	train-rmse:0.0036426+5.12742e-05	test-rmse:0.005228+0.000358356
[7340]	train-rmse:0.0036406+5.12742e-05	test-rmse:0.0052276+0.000357634
[7350]	train-rmse:0.0036384+5.14533e-05	test-rmse:0.0052262+0.000358345
[7360]	train-rmse:0.003637+5.12133e-05	test-rmse:0.005225+0.000356663
[7370]	train-rmse:0.003635+5.16604e-05	test-rmse:0.0052246+0.000357202
[7380]	train-rmse:0.0036332+5.13708e-05	test-rmse:0.005223+0.000357255
[7390]	train-rmse:0.0036312+5.13708e-05	test-rmse:0.0052212+0.000357211
[7400]	train-rmse:0.0036292+5.14836e-05	test-rmse:0.0052194+0.000357183
[7410]	train-rmse:0.0036272+5.14836e-05	test-rmse:0.0052174+0.000357448
[7420]	train-rmse:0.0036254+5.11922e-05	test-rmse:0.0052164+0.00035655
[7430]	train-rmse:0.0036234+5.1531e-05	test-rmse:0.0052148+0.000357251
[7440]	train-rmse:0.0036214+5.1531e-05	test-rmse:0.0052124+0.000358312
[7450]	train-rmse:0.0036198+5.15806e-05	test-rmse:0.0052112+0.000357325
[7460]	train-rmse:0.003618+5.16604e-05	test-rmse:0.0052102+0.000357727
[7470]	train-rmse:0.003616+5.16604e-05	test-rmse:0.00521+0.000357629
[7480]	train-rmse:0.003614+5.16604e-05	test-rmse:0.0052088+0.000356915
[7490]	train-rmse:0.003612+5.21114e-05	test-rmse:0.0052072+0.000357976
[7500]	train-rmse:0.0036102+5.18205e-05	test-rmse:0.0052058+0.000358223
[7510]	train-rmse:0.0036082+5.18205e-05	test-rmse:0.0052062+0.000357029
[7520]	train-rmse:0.0036064+5.20484e-05	test-rmse:0.0052068+0.000357542
[7530]	train-rmse:0.0036044+5.20484e-05	test-rmse:0.005205+0.000358805
[7540]	train-rmse:0.0036026+5.21291e-05	test-rmse:0.005203+0.00035722
[7550]	train-rmse:0.0036008+5.19515e-05	test-rmse:0.005201+0.000358381
[7560]	train-rmse:0.0035988+5.19515e-05	test-rmse:0.0051998+0.000357546
[7570]	train-rmse:0.0035968+5.19515e-05	test-rmse:0.0051986+0.000357508
[7580]	train-rmse:0.003595+5.21805e-05	test-rmse:0.0051966+0.000356767
[7590]	train-rmse:0.0035932+5.22624e-05	test-rmse:0.0051952+0.000357847
[7600]	train-rmse:0.0035914+5.24923e-05	test-rmse:0.0051942+0.000356772
[7610]	train-rmse:0.0035896+5.26483e-05	test-rmse:0.0051928+0.000356874
[7620]	train-rmse:0.0035878+5.24725e-05	test-rmse:0.0051906+0.000356489
[7630]	train-rmse:0.0035858+5.24725e-05	test-rmse:0.0051896+0.000357221
[7640]	train-rmse:0.0035844+5.24198e-05	test-rmse:0.0051886+0.000356201
[7650]	train-rmse:0.0035824+5.24198e-05	test-rmse:0.0051878+0.000356857
[7660]	train-rmse:0.0035804+5.24198e-05	test-rmse:0.0051886+0.000355879
[7670]	train-rmse:0.0035788+5.23274e-05	test-rmse:0.0051876+0.000356004
[7680]	train-rmse:0.0035768+5.23274e-05	test-rmse:0.0051874+0.000355175
[7690]	train-rmse:0.0035752+5.22624e-05	test-rmse:0.0051868+0.000355788
[7700]	train-rmse:0.0035732+5.22624e-05	test-rmse:0.005185+0.000355277
[7710]	train-rmse:0.0035712+5.22624e-05	test-rmse:0.0051826+0.000355844
[7720]	train-rmse:0.0035692+5.22624e-05	test-rmse:0.0051806+0.000355418
[7730]	train-rmse:0.0035676+5.21291e-05	test-rmse:0.0051786+0.000355358
[7740]	train-rmse:0.0035656+5.21291e-05	test-rmse:0.005178+0.000355528
[7750]	train-rmse:0.0035638+5.22892e-05	test-rmse:0.0051768+0.000355082
[7760]	train-rmse:0.0035622+5.18205e-05	test-rmse:0.0051754+0.000354631
[7770]	train-rmse:0.0035602+5.18205e-05	test-rmse:0.0051728+0.000354349
[7780]	train-rmse:0.0035584+5.19022e-05	test-rmse:0.0051712+0.000355789
[7790]	train-rmse:0.0035566+5.20638e-05	test-rmse:0.0051716+0.000356687
[7800]	train-rmse:0.0035548+5.14331e-05	test-rmse:0.0051708+0.000357377
[7810]	train-rmse:0.0035528+5.14331e-05	test-rmse:0.0051702+0.000358012
[7820]	train-rmse:0.0035514+5.13872e-05	test-rmse:0.0051696+0.000358069
[7830]	train-rmse:0.0035496+5.16124e-05	test-rmse:0.005168+0.000357146
[7840]	train-rmse:0.0035476+5.16124e-05	test-rmse:0.0051664+0.000356736
[7850]	train-rmse:0.003546+5.1517e-05	test-rmse:0.0051656+0.00035593
[7860]	train-rmse:0.0035444+5.19022e-05	test-rmse:0.0051664+0.000355355
[7870]	train-rmse:0.0035424+5.19022e-05	test-rmse:0.0051664+0.000356003
[7880]	train-rmse:0.0035404+5.19022e-05	test-rmse:0.0051646+0.000355132
[7890]	train-rmse:0.003539+5.18575e-05	test-rmse:0.0051636+0.00035446
[7900]	train-rmse:0.0035372+5.16775e-05	test-rmse:0.0051614+0.000354816
[7910]	train-rmse:0.0035352+5.16775e-05	test-rmse:0.005161+0.000355649
[7920]	train-rmse:0.0035334+5.19022e-05	test-rmse:0.00516+0.000356593
[7930]	train-rmse:0.0035318+5.21475e-05	test-rmse:0.0051588+0.000356993
[7940]	train-rmse:0.0035302+5.20822e-05	test-rmse:0.0051572+0.00035419
[7950]	train-rmse:0.0035284+5.13872e-05	test-rmse:0.0051556+0.000354608
[7960]	train-rmse:0.0035266+5.14727e-05	test-rmse:0.005155+0.000353607
[7970]	train-rmse:0.0035248+5.16349e-05	test-rmse:0.0051536+0.000353338
[7980]	train-rmse:0.0035232+5.16775e-05	test-rmse:0.0051522+0.000354339
[7990]	train-rmse:0.0035212+5.16775e-05	test-rmse:0.005151+0.000354828
[8000]	train-rmse:0.0035196+5.15853e-05	test-rmse:0.0051486+0.000354877
[8010]	train-rmse:0.0035178+5.12929e-05	test-rmse:0.0051472+0.000354886
[8020]	train-rmse:0.0035158+5.17471e-05	test-rmse:0.0051476+0.000353459
[8030]	train-rmse:0.0035144+5.13638e-05	test-rmse:0.005146+0.000352346
[8040]	train-rmse:0.0035128+5.17471e-05	test-rmse:0.0051444+0.00035273
[8050]	train-rmse:0.0035108+5.17471e-05	test-rmse:0.0051438+0.000352949
[8060]	train-rmse:0.0035094+5.18212e-05	test-rmse:0.0051434+0.000351674
[8070]	train-rmse:0.0035078+5.16929e-05	test-rmse:0.005144+0.000351302
[8080]	train-rmse:0.0035058+5.16929e-05	test-rmse:0.0051398+0.000351432
[8090]	train-rmse:0.0035046+5.14766e-05	test-rmse:0.0051396+0.000352192
[8100]	train-rmse:0.0035028+5.16929e-05	test-rmse:0.0051382+0.000351929
[8110]	train-rmse:0.003501+5.1517e-05	test-rmse:0.0051376+0.000350785
[8120]	train-rmse:0.0034996+5.16085e-05	test-rmse:0.0051378+0.000351032
[8130]	train-rmse:0.0034978+5.16929e-05	test-rmse:0.0051372+0.000350702
[8140]	train-rmse:0.0034962+5.16039e-05	test-rmse:0.0051368+0.000350648
[8150]	train-rmse:0.0034948+5.16929e-05	test-rmse:0.0051362+0.000351379
[8160]	train-rmse:0.0034928+5.16929e-05	test-rmse:0.005136+0.000351764
[8170]	train-rmse:0.0034916+5.14766e-05	test-rmse:0.0051348+0.000351061
[8180]	train-rmse:0.0034896+5.14766e-05	test-rmse:0.0051338+0.000351619
[8190]	train-rmse:0.0034876+5.14766e-05	test-rmse:0.0051332+0.00035225
[8200]	train-rmse:0.0034862+5.19477e-05	test-rmse:0.0051316+0.000350785
[8210]	train-rmse:0.0034844+5.1655e-05	test-rmse:0.0051306+0.000351525
[8220]	train-rmse:0.0034828+5.15651e-05	test-rmse:0.005129+0.000350791
[8230]	train-rmse:0.003481+5.17803e-05	test-rmse:0.005127+0.000351627
[8240]	train-rmse:0.0034794+5.1655e-05	test-rmse:0.0051246+0.00035209
[8250]	train-rmse:0.0034776+5.1744e-05	test-rmse:0.0051248+0.000350752
[8260]	train-rmse:0.003476+5.16217e-05	test-rmse:0.005125+0.000351559
[8270]	train-rmse:0.0034744+5.1531e-05	test-rmse:0.005123+0.000350708
[8280]	train-rmse:0.0034726+5.1744e-05	test-rmse:0.0051206+0.000351366
[8290]	train-rmse:0.0034714+5.1531e-05	test-rmse:0.0051212+0.000350498
[8300]	train-rmse:0.0034698+5.17934e-05	test-rmse:0.0051204+0.000349506
[8310]	train-rmse:0.003468+5.20038e-05	test-rmse:0.0051182+0.000349842
[8320]	train-rmse:0.0034666+5.1624e-05	test-rmse:0.0051182+0.000351314
[8330]	train-rmse:0.003465+5.20038e-05	test-rmse:0.0051172+0.00035215
[8340]	train-rmse:0.0034634+5.2267e-05	test-rmse:0.005117+0.000352721
[8350]	train-rmse:0.0034618+5.21858e-05	test-rmse:0.0051156+0.000352902
[8360]	train-rmse:0.00346+5.20038e-05	test-rmse:0.0051156+0.000352916
[8370]	train-rmse:0.0034584+5.2267e-05	test-rmse:0.005115+0.000352453
[8380]	train-rmse:0.0034568+5.21858e-05	test-rmse:0.0051122+0.000353801
[8390]	train-rmse:0.0034552+5.24496e-05	test-rmse:0.0051108+0.000353392
[8400]	train-rmse:0.0034534+5.2267e-05	test-rmse:0.0051094+0.000353106
[8410]	train-rmse:0.0034518+5.21475e-05	test-rmse:0.0051084+0.000353751
[8420]	train-rmse:0.0034504+5.2267e-05	test-rmse:0.0051082+0.000353407
[8430]	train-rmse:0.0034486+5.24389e-05	test-rmse:0.0051068+0.000352833
[8440]	train-rmse:0.0034474+5.22326e-05	test-rmse:0.0051056+0.000351768
[8450]	train-rmse:0.0034454+5.22326e-05	test-rmse:0.0051036+0.000352232
[8460]	train-rmse:0.0034442+5.21398e-05	test-rmse:0.005102+0.000352068
[8470]	train-rmse:0.0034424+5.22326e-05	test-rmse:0.0051012+0.000351943
[8480]	train-rmse:0.0034408+5.17587e-05	test-rmse:0.0051004+0.000351414
[8490]	train-rmse:0.0034392+5.20592e-05	test-rmse:0.0051+0.000351017
[8500]	train-rmse:0.0034376+5.20523e-05	test-rmse:0.0050986+0.00035109
[8510]	train-rmse:0.003436+5.18536e-05	test-rmse:0.0050976+0.00035109
[8520]	train-rmse:0.0034346+5.15853e-05	test-rmse:0.0050966+0.000350723
[8530]	train-rmse:0.003433+5.18536e-05	test-rmse:0.0050964+0.000350687
[8540]	train-rmse:0.0034316+5.19446e-05	test-rmse:0.0050966+0.000350813
[8550]	train-rmse:0.0034298+5.16504e-05	test-rmse:0.0050958+0.000351894
[8560]	train-rmse:0.0034286+5.19446e-05	test-rmse:0.0050946+0.00035203
[8570]	train-rmse:0.003427+5.18536e-05	test-rmse:0.0050946+0.00035073
[8580]	train-rmse:0.0034256+5.19446e-05	test-rmse:0.0050934+0.000349373
[8590]	train-rmse:0.0034238+5.16504e-05	test-rmse:0.0050918+0.000350213
[8600]	train-rmse:0.0034224+5.1744e-05	test-rmse:0.005091+0.000350272
[8610]	train-rmse:0.0034208+5.16504e-05	test-rmse:0.0050896+0.000349502
[8620]	train-rmse:0.0034194+5.12742e-05	test-rmse:0.0050884+0.000350082
[8630]	train-rmse:0.0034176+5.14494e-05	test-rmse:0.0050874+0.000349524
[8640]	train-rmse:0.0034164+5.12742e-05	test-rmse:0.0050854+0.000348876
[8650]	train-rmse:0.0034146+5.14494e-05	test-rmse:0.0050838+0.000350376
[8660]	train-rmse:0.0034132+5.11523e-05	test-rmse:0.0050832+0.000351334
[8670]	train-rmse:0.0034118+5.11562e-05	test-rmse:0.0050836+0.00034976
[8680]	train-rmse:0.0034106+5.14494e-05	test-rmse:0.0050822+0.000349013
[8690]	train-rmse:0.0034088+5.12695e-05	test-rmse:0.005081+0.000349607
[8700]	train-rmse:0.0034074+5.12508e-05	test-rmse:0.0050796+0.000350237
[8710]	train-rmse:0.003406+5.14471e-05	test-rmse:0.005079+0.000350164
[8720]	train-rmse:0.0034044+5.12508e-05	test-rmse:0.0050782+0.00034976
[8730]	train-rmse:0.0034026+5.10709e-05	test-rmse:0.0050774+0.000349345
[8740]	train-rmse:0.0034012+5.11523e-05	test-rmse:0.0050762+0.000350128
[8750]	train-rmse:0.0033994+5.12508e-05	test-rmse:0.0050754+0.000350936
[8760]	train-rmse:0.003398+5.09549e-05	test-rmse:0.005075+0.000349985
[8770]	train-rmse:0.0033968+5.125e-05	test-rmse:0.0050744+0.000351854
[8780]	train-rmse:0.0033952+5.11523e-05	test-rmse:0.0050742+0.000351613
[8790]	train-rmse:0.0033938+5.125e-05	test-rmse:0.0050728+0.000352704
[8800]	train-rmse:0.0033924+5.12508e-05	test-rmse:0.0050718+0.000351956
[8810]	train-rmse:0.0033906+5.143e-05	test-rmse:0.0050706+0.000351808
[8820]	train-rmse:0.0033892+5.10545e-05	test-rmse:0.0050696+0.000351798
[8830]	train-rmse:0.0033878+5.11719e-05	test-rmse:0.0050688+0.000352319
[8840]	train-rmse:0.0033862+5.10545e-05	test-rmse:0.0050672+0.000352625
[8850]	train-rmse:0.0033846+5.10553e-05	test-rmse:0.0050664+0.000352768
[8860]	train-rmse:0.0033832+5.11367e-05	test-rmse:0.005066+0.00035266
[8870]	train-rmse:0.0033818+5.12383e-05	test-rmse:0.0050656+0.000354042
[8880]	train-rmse:0.0033804+5.09572e-05	test-rmse:0.0050652+0.000354034
[8890]	train-rmse:0.0033792+5.11367e-05	test-rmse:0.0050644+0.000355028
[8900]	train-rmse:0.0033774+5.09572e-05	test-rmse:0.0050634+0.000354479
[8910]	train-rmse:0.0033766+5.10553e-05	test-rmse:0.0050624+0.000355154
[8920]	train-rmse:0.0033746+5.10553e-05	test-rmse:0.0050618+0.000355638
[8930]	train-rmse:0.003373+5.09431e-05	test-rmse:0.005061+0.000354875
[8940]	train-rmse:0.0033716+5.10553e-05	test-rmse:0.0050594+0.000355078
[8950]	train-rmse:0.00337+5.09431e-05	test-rmse:0.0050578+0.000355548
[8960]	train-rmse:0.0033688+5.12383e-05	test-rmse:0.0050568+0.000356119
[8970]	train-rmse:0.0033674+5.12351e-05	test-rmse:0.0050574+0.000355646
[8980]	train-rmse:0.0033656+5.10553e-05	test-rmse:0.0050566+0.000355248
[8990]	train-rmse:0.0033646+5.10553e-05	test-rmse:0.0050562+0.000355037
[9000]	train-rmse:0.003363+5.05806e-05	test-rmse:0.0050546+0.000355136
[9010]	train-rmse:0.0033612+5.07638e-05	test-rmse:0.005053+0.000354506
[9020]	train-rmse:0.00336+5.10607e-05	test-rmse:0.005052+0.000355528
[9030]	train-rmse:0.0033588+5.12383e-05	test-rmse:0.005052+0.000356925
[9040]	train-rmse:0.0033572+5.07638e-05	test-rmse:0.0050516+0.000356713
[9050]	train-rmse:0.003356+5.05806e-05	test-rmse:0.005051+0.000355244
[9060]	train-rmse:0.0033542+5.07638e-05	test-rmse:0.0050502+0.00035456
[9070]	train-rmse:0.003353+5.06675e-05	test-rmse:0.0050498+0.000355002
[9080]	train-rmse:0.0033512+5.07638e-05	test-rmse:0.0050496+0.000354192
[9090]	train-rmse:0.00335+5.06675e-05	test-rmse:0.0050498+0.000353902
[9100]	train-rmse:0.0033482+5.07638e-05	test-rmse:0.0050482+0.000353744
[9110]	train-rmse:0.0033472+5.07638e-05	test-rmse:0.0050474+0.000353036
[9120]	train-rmse:0.0033454+5.09494e-05	test-rmse:0.0050466+0.000353459
[9130]	train-rmse:0.0033442+5.07638e-05	test-rmse:0.005047+0.000352668
[9140]	train-rmse:0.0033424+5.09494e-05	test-rmse:0.0050466+0.000353489
[9150]	train-rmse:0.0033412+5.07598e-05	test-rmse:0.0050448+0.000354483
[9160]	train-rmse:0.0033396+5.10474e-05	test-rmse:0.005045+0.000354177
[9170]	train-rmse:0.0033382+5.07598e-05	test-rmse:0.0050448+0.000355146
[9180]	train-rmse:0.003337+5.10568e-05	test-rmse:0.0050436+0.000354555
[9190]	train-rmse:0.0033358+5.087e-05	test-rmse:0.0050434+0.000355587
[9200]	train-rmse:0.0033344+5.0859e-05	test-rmse:0.0050426+0.000355959
[9210]	train-rmse:0.0033328+5.087e-05	test-rmse:0.0050416+0.000356509
[9220]	train-rmse:0.0033314+5.0859e-05	test-rmse:0.0050404+0.000357668
[9230]	train-rmse:0.0033304+5.0859e-05	test-rmse:0.0050394+0.000358324
[9240]	train-rmse:0.003329+5.09706e-05	test-rmse:0.0050374+0.000358324
[9250]	train-rmse:0.0033276+5.06818e-05	test-rmse:0.0050372+0.000358752
[9260]	train-rmse:0.003326+5.09706e-05	test-rmse:0.0050368+0.000359424
[9270]	train-rmse:0.0033246+5.06818e-05	test-rmse:0.0050346+0.000359083
[9280]	train-rmse:0.0033234+5.0859e-05	test-rmse:0.0050328+0.00035998
[9290]	train-rmse:0.0033222+5.11562e-05	test-rmse:0.0050316+0.000360048
[9300]	train-rmse:0.0033208+5.12344e-05	test-rmse:0.0050304+0.00036014
[9310]	train-rmse:0.0033194+5.0859e-05	test-rmse:0.0050292+0.000359652
[9320]	train-rmse:0.0033182+5.11562e-05	test-rmse:0.0050282+0.000360379
[9330]	train-rmse:0.0033168+5.087e-05	test-rmse:0.0050274+0.000359843
[9340]	train-rmse:0.003315+5.09706e-05	test-rmse:0.005027+0.000359163
[9350]	train-rmse:0.0033138+5.087e-05	test-rmse:0.0050262+0.000358974
[9360]	train-rmse:0.0033124+5.09808e-05	test-rmse:0.0050266+0.000357087
[9370]	train-rmse:0.0033112+5.08818e-05	test-rmse:0.0050262+0.000356169
[9380]	train-rmse:0.0033098+5.087e-05	test-rmse:0.005026+0.000355667
[9390]	train-rmse:0.0033082+5.06731e-05	test-rmse:0.0050254+0.000355159
[9400]	train-rmse:0.003307+5.09706e-05	test-rmse:0.0050242+0.00035328
[9410]	train-rmse:0.0033056+5.06818e-05	test-rmse:0.0050232+0.00035255
[9420]	train-rmse:0.0033042+5.06731e-05	test-rmse:0.0050226+0.000352524
[9430]	train-rmse:0.003303+5.04856e-05	test-rmse:0.005021+0.00035336
[9440]	train-rmse:0.0033014+5.04959e-05	test-rmse:0.0050204+0.000352997
[9450]	train-rmse:0.0033002+5.03086e-05	test-rmse:0.0050204+0.000352335
[9460]	train-rmse:0.0032992+5.03086e-05	test-rmse:0.0050198+0.000352244
[9470]	train-rmse:0.0032976+5.05988e-05	test-rmse:0.0050186+0.000352306
[9480]	train-rmse:0.0032966+5.05988e-05	test-rmse:0.005018+0.000352628
[9490]	train-rmse:0.0032952+5.03126e-05	test-rmse:0.0050178+0.000354148
[9500]	train-rmse:0.003294+5.01238e-05	test-rmse:0.0050166+0.000354113
[9510]	train-rmse:0.0032926+5.02378e-05	test-rmse:0.0050154+0.0003534
[9520]	train-rmse:0.003291+5.01238e-05	test-rmse:0.0050144+0.000354781
[9530]	train-rmse:0.0032898+5.04238e-05	test-rmse:0.0050126+0.000355305
[9540]	train-rmse:0.0032884+5.01342e-05	test-rmse:0.0050122+0.000354372
[9550]	train-rmse:0.0032872+5.03086e-05	test-rmse:0.0050122+0.000354038
[9560]	train-rmse:0.0032856+5.02378e-05	test-rmse:0.005011+0.000353431
[9570]	train-rmse:0.0032846+5.02378e-05	test-rmse:0.00501+0.000352893
[9580]	train-rmse:0.0032832+5.03086e-05	test-rmse:0.00501+0.000352479
[9590]	train-rmse:0.003282+5.01238e-05	test-rmse:0.0050102+0.000351641
[9600]	train-rmse:0.0032804+5.04127e-05	test-rmse:0.00501+0.000353631
[9610]	train-rmse:0.0032794+5.04127e-05	test-rmse:0.0050094+0.000352895
[9620]	train-rmse:0.003278+5.04063e-05	test-rmse:0.005009+0.000352221
[9630]	train-rmse:0.0032766+5.05197e-05	test-rmse:0.0050076+0.000353027
[9640]	train-rmse:0.0032752+5.05901e-05	test-rmse:0.0050076+0.000353099
[9650]	train-rmse:0.003274+5.08881e-05	test-rmse:0.0050064+0.000352388
[9660]	train-rmse:0.0032726+5.05988e-05	test-rmse:0.0050058+0.000352601
[9670]	train-rmse:0.003271+5.08881e-05	test-rmse:0.0050052+0.000353243
[9680]	train-rmse:0.0032698+5.07835e-05	test-rmse:0.005004+0.000352277
[9690]	train-rmse:0.0032688+5.07835e-05	test-rmse:0.0050044+0.000352575
[9700]	train-rmse:0.0032672+5.07953e-05	test-rmse:0.0050038+0.000352142
[9710]	train-rmse:0.003266+5.09706e-05	test-rmse:0.005002+0.000351458
[9720]	train-rmse:0.0032646+5.05988e-05	test-rmse:0.005001+0.000350727
[9730]	train-rmse:0.0032634+5.04959e-05	test-rmse:0.005+0.000350487
[9740]	train-rmse:0.003262+5.04856e-05	test-rmse:0.0049998+0.000351225
[9750]	train-rmse:0.0032606+5.05988e-05	test-rmse:0.0049994+0.000350125
[9760]	train-rmse:0.0032596+5.05988e-05	test-rmse:0.0049992+0.00035031
[9770]	train-rmse:0.003258+5.04856e-05	test-rmse:0.0049978+0.000350121
[9780]	train-rmse:0.0032566+5.05988e-05	test-rmse:0.0049968+0.000350881
[9790]	train-rmse:0.0032556+5.05988e-05	test-rmse:0.0049958+0.000350905
[9800]	train-rmse:0.0032544+5.04959e-05	test-rmse:0.0049954+0.000349791
[9810]	train-rmse:0.0032532+5.03086e-05	test-rmse:0.004994+0.000350074
[9820]	train-rmse:0.0032518+5.04238e-05	test-rmse:0.0049932+0.00034946
[9830]	train-rmse:0.0032506+5.05988e-05	test-rmse:0.0049924+0.000349581
[9840]	train-rmse:0.0032494+5.04959e-05	test-rmse:0.0049918+0.000348615
[9850]	train-rmse:0.0032478+5.04238e-05	test-rmse:0.0049916+0.000349438
[9860]	train-rmse:0.0032466+5.05988e-05	test-rmse:0.0049908+0.000349558
[9870]	train-rmse:0.0032456+5.05988e-05	test-rmse:0.0049912+0.000348799
[9880]	train-rmse:0.0032444+5.04167e-05	test-rmse:0.0049908+0.000349058
[9890]	train-rmse:0.003243+5.01238e-05	test-rmse:0.0049898+0.000349086
[9900]	train-rmse:0.0032418+5.04238e-05	test-rmse:0.0049892+0.000348472
[9910]	train-rmse:0.0032402+5.03126e-05	test-rmse:0.0049888+0.000349463
[9920]	train-rmse:0.003239+5.01238e-05	test-rmse:0.0049876+0.000349487
[9930]	train-rmse:0.003238+5.01238e-05	test-rmse:0.0049872+0.000349272
[9940]	train-rmse:0.0032366+5.02418e-05	test-rmse:0.004986+0.000349702
[9950]	train-rmse:0.0032354+5.04167e-05	test-rmse:0.0049858+0.000349594
[9960]	train-rmse:0.0032338+4.99416e-05	test-rmse:0.0049854+0.000349
[9970]	train-rmse:0.0032328+4.99416e-05	test-rmse:0.004985+0.000349221
[9980]	train-rmse:0.0032316+5.02418e-05	test-rmse:0.0049852+0.000349318
[9990]	train-rmse:0.0032304+5.00544e-05	test-rmse:0.0049846+0.000348229
Ensemble-CV: 0.004985399999999999+0.0003491410030345907
(1359, 110) (454, 110) (1359,) (454,)
20170707_13h10m51s
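The log above is the per-round output of an xgboost cross-validation run, printed every 10 boosting rounds as the mean and standard deviation of the train and test RMSE across folds. The closing Ensemble-CV line is simply the test RMSE mean and standard deviation at the last evaluated round, and the four shapes printed after it appear to be the train/test feature matrices and target vectors from a train_test_split. A minimal sketch of a CV call that produces this kind of log is given below; the hyperparameter values, variable names and the use of early stopping are illustrative assumptions rather than the notebook's exact configuration.

# Minimal sketch of an xgboost CV run producing a log like the one above.
# All hyperparameter values and variable names are assumptions for
# illustration, not the settings actually used in this notebook.
import xgboost as xgb

def cross_validate_xgb(x_train, y_train, num_boost_round=10000, nfold=5):
    dtrain = xgb.DMatrix(x_train, label=y_train)
    params = {
        'objective': 'reg:linear',  # squared-error regression (pre-1.0 naming)
        'eta': 0.01,                # small learning rate, hence the many rounds
        'max_depth': 4,
        'subsample': 0.9,
        'silent': 1,
    }
    # verbose_eval=10 prints '[round] train-rmse:mean+std  test-rmse:mean+std'
    # every 10 rounds, which is the format of the log above.
    cv_result = xgb.cv(params, dtrain,
                       num_boost_round=num_boost_round,
                       nfold=nfold,
                       metrics='rmse',
                       early_stopping_rounds=100,
                       verbose_eval=10,
                       seed=0)
    # The 'Ensemble-CV' summary is the test RMSE mean and std at the last round.
    print('Ensemble-CV: {0}+{1}'.format(cv_result['test-rmse-mean'].iloc[-1],
                                        cv_result['test-rmse-std'].iloc[-1]))
    return cv_result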
In [2]:
# Record the software versions used in this notebook (requires the version_information IPython extension)

%reload_ext version_information

%version_information numpy, scipy, matplotlib, sklearn, version_information
Out[2]:
Software             Version
Python               3.6.1 64bit [GCC 4.4.7 20120313 (Red Hat 4.4.7-1)]
IPython              5.2.2
OS                   Linux 3.13.0 123 generic x86_64 with debian jessie sid
numpy                1.11.3
scipy                0.18.1
matplotlib           2.0.0
sklearn              0.18.1
version_information  1.0.3
Fri Jul 07 13:13:34 2017 CEST
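If the version_information extension is not installed, an equivalent snapshot of the environment can be taken with plain Python. The sketch below uses only the standard library and each package's own __version__ attribute; the list of packages to report is an assumption based on the imports used in this notebook.

# Minimal alternative to the version_information extension; which packages to
# report is an assumption based on the imports used in this notebook.
import sys
import platform
import datetime
import numpy, scipy, matplotlib, sklearn

print('Python', sys.version)
print('OS', platform.platform())
for module in (numpy, scipy, matplotlib, sklearn):
    print(module.__name__, module.__version__)
print(datetime.datetime.now().strftime('%a %b %d %H:%M:%S %Y'))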
In [ ]: