01 - Recommender System

  • "Given the values I do have, predict my missing values"
  • This is a kind of imputation / recommender system
  • Straight-forward matrix decomposition methods can help here
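
A minimal toy sketch of the idea (made-up numbers, not the challenge data): fill the holes, take a truncated SVD, and read the low-rank reconstruction back at the missing positions.

In [ ]:
import numpy as np
from scipy.sparse.linalg import svds

M = np.array([[1.0, 2.0, np.nan],
              [2.0, 4.1, 6.2],
              [3.0, np.nan, 9.1],
              [4.0, 8.2, 12.0]])
mask = np.isnan(M)
filled = np.where(mask, np.nanmean(M, axis=0), M)  # column-mean fill
U, s, Vt = svds(filled, k=2)                       # rank-2 approximation
M_hat = U @ np.diag(s) @ Vt
print(M_hat[mask])  # imputed values at the missing positions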

In [302]:
import numpy as np
import pandas as pd
from scipy.sparse.linalg import svds
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt

In [2]:
from jlab import load_test_data

In [110]:
X_train = pd.read_csv('MLchallenge2_training.csv')
X_test = load_test_data('test_in.csv')
# Stack the test tracks on top of the training tracks and, for a first pass,
# zero-fill the missing values
X = (pd.concat([X_test, X_train], axis=0)
     .reset_index(drop=True)
     .fillna(0.0))
X_true = pd.read_csv('test_prediction.csv', names=['x', 'y', 'px', 'py', 'pz'],
                     header=None)

In [111]:
X.head()


Out[111]:
x y z px py pz x1 y1 z1 px1 ... z23 px23 py23 pz23 x24 y24 z24 px24 py24 pz24
0 0.877 1.322 65.0 -0.244 -0.053 2.414 -10.669 0.330 176.944 -0.254 ... 0.00 0.000 0.000 0.000 0.0 0.0 0.000 0.0 0.0 0.0
1 0.786 -2.483 65.0 0.103 0.432 2.593 7.366 15.502 176.944 0.206 ... 0.00 0.000 0.000 0.000 0.0 0.0 0.000 0.0 0.0 0.0
2 -13.134 -26.531 65.0 0.064 -0.021 0.953 -7.586 -30.687 176.944 0.027 ... 0.00 0.000 0.000 0.000 0.0 0.0 0.000 0.0 0.0 0.0
3 18.454 2.805 65.0 -0.019 0.069 1.833 18.043 6.797 176.944 0.013 ... 0.00 0.000 0.000 0.000 0.0 0.0 0.000 0.0 0.0 0.0
4 15.552 -19.196 65.0 -0.010 -0.011 2.366 15.068 -19.750 176.944 -0.014 ... 341.28 -0.014 -0.002 2.351 0.0 0.0 343.405 0.0 0.0 0.0

5 rows × 150 columns


In [112]:
X_true.head()


Out[112]:
x y px py pz
0 -23.123945 3.142886 -0.235592 0.091612 2.413377
1 19.633486 32.319292 0.314376 0.316425 2.592952
2 -8.308506 -39.299613 -0.020097 -0.051232 0.948906
3 19.918838 10.664617 0.038102 0.047740 1.864014
4 13.649239 -20.616935 -0.015548 0.001471 2.323953

In [97]:
U, sigma, Vt = svds(X, k=30)  # truncated SVD: keep the 30 largest singular values
sigma = np.diag(sigma)        # svds returns the singular values as a 1-D array

In [98]:
X_pred = pd.DataFrame(np.dot(np.dot(U, sigma), Vt), columns=X.columns, index=X.index)
X_pred.head()


Out[98]:
x y z px py pz x1 y1 z1 px1 ... z23 px23 py23 pz23 x24 y24 z24 px24 py24 pz24
0 1.139458 1.841577 65.000348 -0.132866 -0.152526 2.278765 -10.308074 0.077691 176.945038 -0.140774 ... -0.001327 -0.020493 -0.114052 0.212795 -0.317946 0.195674 -0.005499 -0.020159 -0.113749 0.212069
1 1.538296 -3.277263 65.000407 -0.142155 0.290338 2.350373 6.721598 15.147325 176.945195 -0.041034 ... -0.005960 -0.140109 0.100994 0.363288 0.135978 0.424870 -0.004066 -0.140975 0.100855 0.362115
2 -13.669357 -25.403695 64.999143 0.235995 0.027828 1.139225 -7.365034 -31.750689 176.941817 0.189261 ... -0.004733 0.290119 0.160645 -0.763249 0.596819 3.866317 0.006622 0.293293 0.166603 -0.759302
3 18.292252 2.048436 64.999907 -0.122082 0.101017 1.869429 18.183965 7.240228 176.943918 -0.092881 ... -0.006987 -0.185933 0.090368 -0.093341 1.670177 0.903375 0.008946 -0.176907 0.092282 -0.100364
4 15.657900 -19.322310 65.000323 -0.012487 0.010699 2.273879 14.897386 -19.569022 176.945024 -0.021853 ... 341.263763 -0.022465 0.067983 2.253718 6.673950 -10.270620 343.400090 -0.025013 0.081539 2.256946

5 rows × 150 columns

Hooray, we did it

  • Now we need to figure out how well it actually did
  • Each test track is truncated partway through the detector: the first NaN marks the evaluation plane, and the prediction is judged on (x, y, px, py, pz) there

In [303]:
def get_test_detector_plane(row):
    # Find the locations of the NaNs and take the first one,
    # then integer-divide by 6 (6 values per detector plane)
    plane = np.where(np.isnan(row.values))[0][0] // 6
    return int(plane)
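
Quick sanity check on a synthetic row (made-up values): the first NaN sits at flat index 42, so the function should report plane 42 // 6 = 7.

In [ ]:
toy = pd.Series(np.arange(150, dtype=float))
toy.iloc[42:] = np.nan
assert get_test_detector_plane(toy) == 7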

In [304]:
def get_vals_at_plane(row, plane):
    # Plane-0 columns are unsuffixed ('x', 'y', ...), but the test tracks all
    # have plane 0 measured, so the eval plane is always >= 1
    cols = [i + str(int(plane)) for i in ['x','y','px','py','pz']]
    return row[cols].values

In [305]:
def get_vals_at_eval_plane(X_test, X_pred):
    X = X_pred.copy()
    X['eval_plane'] = X_test.apply(get_test_detector_plane, axis=1)
    retvals = X.loc[X_test.index.values].apply(lambda x: get_vals_at_plane(x, x['eval_plane']), axis=1)
    return retvals

In [306]:
eval_planes = X_test.apply(get_test_detector_plane, axis=1)

In [309]:
get_vals_at_plane(X_test.loc[15], 7)


Out[309]:
array([24.406, 30.898,  0.139,  0.107,  2.34 ])

Make a recommender class, a la sklearn

  • Should have fit, predict methods

In [311]:
import logging
from jlab import COLS
from sklearn.preprocessing import StandardScaler

class DetectorRecommender(object):
    
    def __init__(self, k=20):
        
        self.logger = logging.getLogger(__name__)
        self.k = k
        self.planes = 25  # planes 0-24: 150 columns / 6 values per plane
        self.kinematics = ["x", "y", "px", "py", "pz"]
        self.cols = COLS
        self.X_train = pd.DataFrame(columns=self.cols)
        self.X_test = pd.DataFrame(columns=self.cols)
        self.scaler = StandardScaler()
        
    def fit(self, df):
        """SVD isn't really 'trained', but... """
        
        self.X_train = df.copy(deep=True)
        
    def predict(self, df):
        
        # Make a copy, index it from 0 to N
        self.logger.debug("Making a copy")
        self.X_test = df.copy(deep=True).reset_index(drop=True)
        
        # For each track, figure out which detector plane we'll evaluate
        self.logger.debug("Determining evaluation planes")
        eval_planes = self.X_test.apply(self.get_eval_detector_plane, axis=1)
        
        # Combine with the training set, shuffle it, and fill missing values
        self.logger.debug("Combining train and test sets for SVD")
        X = (pd.concat([self.X_test, self.X_train], axis=0)
             .reset_index(drop=True)
             .sample(replace=False, frac=1.0))
        
        # Fill with the mean values of each column
        self.logger.debug("Filling with mean values")
        X = X.fillna(X.mean())
        
        # Normalize the values
        self.logger.debug("Applying standardscaler")
        X_norm_values = self.scaler.fit_transform(X)
        X_norm = pd.DataFrame(X_norm_values, columns=X.columns, index=X.index)
        
        # Singular Value Decomposition
        self.logger.debug("Making predictions")
        X_pred_norm = self.fit_predict_svds(X_norm)
        
        # Extract our test tracks
        X_pred_norm = X_pred_norm.loc[self.X_test.index, :].sort_index()
        
        # Un-normalize them
        X_pred_values = self.scaler.inverse_transform(X_pred_norm)
        X_pred = pd.DataFrame(X_pred_values, columns=X_pred_norm.columns,
                              index=X_pred_norm.index)
        self.logger.debug("De-normalized. Extracting pred values.")
        
        # Extract just the non-z kinematic values for the eval planes
        det_eval_values = self.extract_values_at_eval_planes(X_pred, eval_planes)
        
        return det_eval_values
    

    def fit_predict_svds(self, X):
        U, sigma, Vt = svds(X, k=self.k)
        sigma = np.diag(sigma)
        X_pred = pd.DataFrame(np.dot(np.dot(U, sigma), Vt),
                              columns=X.columns, index=X.index)
        return X_pred
        
    def extract_values_at_eval_planes(self, pred, planes):
        X = pred.copy(deep=True)
        X['eval_plane'] = planes
        retvals = X.apply(lambda x: self.get_vals_at_plane(x, x['eval_plane']), axis=1)
        retvals_df = pd.DataFrame(retvals.values.tolist(), columns=self.kinematics)
        return retvals_df
    
    def get_vals_at_plane(self, row, plane):
        cols = [i + str(int(plane)) for i in self.kinematics]
        return row[cols].values
    
    def get_eval_detector_plane(self, row):
        # Find the locations of the NaNs and take the first one,
        # then integer-divide by 6 (6 values per detector plane)
        plane = np.where(np.isnan(row.values))[0][0] // 6
        return int(plane)

In [284]:
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(name)-12s - %(levelname)-8s - %(message)s')

In [285]:
predictor = DetectorRecommender()

In [286]:
predictor.fit(X_train)

In [287]:
X_pred = predictor.predict(X_test)


2019-10-29 07:24:55,659 __main__     DEBUG    Making a copy
2019-10-29 07:24:55,667 __main__     DEBUG    Determining evaluation planes
2019-10-29 07:24:55,770 __main__     DEBUG    Combining train and test sets for SVD
2019-10-29 07:24:56,855 __main__     DEBUG    Filling with mean values
2019-10-29 07:24:57,086 __main__     DEBUG    Applying standardscaler
2019-10-29 07:24:57,899 __main__     DEBUG    Making predictions
2019-10-29 07:24:59,484 __main__     DEBUG    De-normalized. Extracting pred values.

In [288]:
X_pred.head()


Out[288]:
x y px py pz
0 -10.402730 0.495083 -0.101857 0.040509 2.136227
1 9.098936 15.436974 0.151903 0.137151 2.225860
2 -4.508659 -18.078571 0.009943 0.003992 1.521642
3 10.067680 4.905410 0.003623 0.020500 1.921455
4 12.463507 -17.100075 -0.009114 0.004804 2.296975

In [290]:
mean_squared_error(X_true, X_pred)


Out[290]:
37.71380513026001

Tune the one hyperparameter we have: k, the number of singular values kept in the truncated SVD


In [294]:
for k in range(5,15):
    predictor = DetectorRecommender(k=k)
    predictor.fit(X_train)
    X_pred = predictor.predict(X_test)
    print(k, mean_squared_error(X_true, X_pred))


2019-10-29 08:40:56,810 __main__     DEBUG    Making a copy
2019-10-29 08:40:56,824 __main__     DEBUG    Determining evaluation planes
2019-10-29 08:40:56,949 __main__     DEBUG    Combining train and test sets for SVD
2019-10-29 08:40:58,033 __main__     DEBUG    Filling with mean values
2019-10-29 08:40:58,278 __main__     DEBUG    Applying standardscaler
2019-10-29 08:40:59,122 __main__     DEBUG    Making predictions
2019-10-29 08:40:59,872 __main__     DEBUG    De-normalized. Extracting pred values.
---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
//anaconda3/lib/python3.7/importlib/_bootstrap.py in _find_and_load(name, import_)

//anaconda3/lib/python3.7/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)

//anaconda3/lib/python3.7/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)

ModuleNotFoundError: No module named 'pandas._libs.pandas'

During handling of the above exception, another exception occurred:

KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-294-acb4781ec768> in <module>
      3     predictor = DetectorRecommender(k=k)
      4     predictor.fit(X_train)
----> 5     X_pred = predictor.predict(X_test)
      6     print(k, mean_squared_error(X_true, X_pred))

<ipython-input-283-ed69ee6e949e> in predict(self, df)
     60 
     61         # Extract just the non-z kinematic values for the eval planes
---> 62         det_eval_values = self.extract_values_at_eval_planes(X_pred, eval_planes)
     63 
     64         return det_eval_values

<ipython-input-283-ed69ee6e949e> in extract_values_at_eval_planes(self, pred, planes)
     74         X = pred.copy(deep=True)
     75         X['eval_plane'] = planes
---> 76         retvals = X.apply(lambda x: self.get_vals_at_plane(x, x['eval_plane']), axis=1)
     77         retvals_df = pd.DataFrame(retvals.values.tolist(), columns=self.kinematics)
     78         return retvals_df

(... pandas apply internals elided ...)

<ipython-input-283-ed69ee6e949e> in <lambda>(x)
     74         X = pred.copy(deep=True)
     75         X['eval_plane'] = planes
---> 76         retvals = X.apply(lambda x: self.get_vals_at_plane(x, x['eval_plane']), axis=1)
     77         retvals_df = pd.DataFrame(retvals.values.tolist(), columns=self.kinematics)
     78         return retvals_df

<ipython-input-283-ed69ee6e949e> in get_vals_at_plane(self, row, plane)
     80     def get_vals_at_plane(self, row, plane):
     81         cols = [i + str(int(plane)) for i in self.kinematics]
---> 82         return row[cols].values
     83 
     84     def get_eval_detector_plane(self, row):

(... pandas indexing and import internals elided ...)

KeyboardInterrupt: 

  • Optimal performance at k=7

In [300]:
predictor = DetectorRecommender(k=7)
predictor.fit(X_train)
X_pred = predictor.predict(X_test)
print(mean_squared_error(X_true, X_pred))


2019-10-29 08:47:32,386 __main__     DEBUG    Making a copy
2019-10-29 08:47:32,391 __main__     DEBUG    Determining evaluation planes
2019-10-29 08:47:32,498 __main__     DEBUG    Combining train and test sets for SVD
2019-10-29 08:47:33,901 __main__     DEBUG    Filling with mean values
2019-10-29 08:47:34,069 __main__     DEBUG    Applying standardscaler
2019-10-29 08:47:35,066 __main__     DEBUG    Making predictions
2019-10-29 08:47:35,796 __main__     DEBUG    De-normalized. Extracting pred values.
20.43716499928241

  • Down from 37.71 with the default k=20, nearly half the MSE

Surprise!

Try out this well-supported recommender package


In [314]:
!pip install scikit-surprise


Collecting scikit-surprise
  Using cached https://files.pythonhosted.org/packages/f5/da/b5700d96495fb4f092be497f02492768a3d96a3f4fa2ae7dea46d4081cfa/scikit-surprise-1.1.0.tar.gz
Requirement already satisfied: joblib>=0.11 in /anaconda3/lib/python3.7/site-packages (from scikit-surprise) (0.13.2)
Requirement already satisfied: numpy>=1.11.2 in /anaconda3/lib/python3.7/site-packages (from scikit-surprise) (1.16.4)
Requirement already satisfied: scipy>=1.0.0 in /anaconda3/lib/python3.7/site-packages (from scikit-surprise) (1.3.0)
Requirement already satisfied: six>=1.10.0 in /anaconda3/lib/python3.7/site-packages (from scikit-surprise) (1.12.0)
Building wheels for collected packages: scikit-surprise
  Building wheel for scikit-surprise (setup.py) ... done
  Stored in directory: /Users/dannowitz/Library/Caches/pip/wheels/cc/fa/8c/16c93fccce688ae1bde7d979ff102f7bee980d9cfeb8641bcf
Successfully built scikit-surprise
Installing collected packages: scikit-surprise
Successfully installed scikit-surprise-1.1.0

In [298]:
import surprise


---------------------------------------------------------------------------
ModuleNotFoundError                       Traceback (most recent call last)
<ipython-input-298-7ee1a83cf20e> in <module>
----> 1 import surprise

ModuleNotFoundError: No module named 'surprise'

  • Note the execution counts: this import was actually run before the pip install above, hence the error
  • surprise wants long-format (user, item, rating) triples, so next reshape the wide track matrix with melt

In [ ]:
X.melt()

In [169]:
X.index.name = "track_id"

In [170]:
X.head().reset_index().melt(id_vars=['track_id'])


Out[170]:
track_id variable value
0 0 x 0.877
1 1 x 0.786
2 2 x -13.134
3 3 x 18.454
4 4 x 15.552
5 0 y 1.322
6 1 y -2.483
7 2 y -26.531
8 3 y 2.805
9 4 y -19.196
10 0 z 65.000
11 1 z 65.000
12 2 z 65.000
13 3 z 65.000
14 4 z 65.000
15 0 px -0.244
16 1 px 0.103
17 2 px 0.064
18 3 px -0.019
19 4 px -0.010
20 0 py -0.053
21 1 py 0.432
22 2 py -0.021
23 3 py 0.069
24 4 py -0.011
25 0 pz 2.414
26 1 pz 2.593
27 2 pz 0.953
28 3 pz 1.833
29 4 pz 2.366
... ... ... ...
720 0 x24 0.000
721 1 x24 0.000
722 2 x24 0.000
723 3 x24 0.000
724 4 x24 0.000
725 0 y24 0.000
726 1 y24 0.000
727 2 y24 0.000
728 3 y24 0.000
729 4 y24 0.000
730 0 z24 0.000
731 1 z24 0.000
732 2 z24 0.000
733 3 z24 0.000
734 4 z24 343.405
735 0 px24 0.000
736 1 px24 0.000
737 2 px24 0.000
738 3 px24 0.000
739 4 px24 0.000
740 0 py24 0.000
741 1 py24 0.000
742 2 py24 0.000
743 3 py24 0.000
744 4 py24 0.000
745 0 pz24 0.000
746 1 pz24 0.000
747 2 pz24 0.000
748 3 pz24 0.000
749 4 pz24 0.000

750 rows × 3 columns
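
This long format is exactly what surprise consumes: (user, item, rating) triples, here (track_id, variable, value). A minimal sketch of where this was headed, assuming the import works in a fresh kernel; note that surprise's Reader wants a bounded rating scale, which is an awkward fit for unbounded kinematic values, so treat this as illustrative only.

In [ ]:
from surprise import Dataset, Reader, SVD

# Melt to (track_id, variable, value) triples, dropping missing entries
long_df = X.reset_index().melt(id_vars=['track_id']).dropna()

# surprise insists on a bounded rating scale; use the observed range
reader = Reader(rating_scale=(long_df['value'].min(), long_df['value'].max()))
data = Dataset.load_from_df(long_df[['track_id', 'variable', 'value']], reader)

algo = SVD(n_factors=7)
algo.fit(data.build_full_trainset())

# Predict one missing entry, e.g. x at plane 7 for track 0
print(algo.predict(uid=0, iid='x7').est)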


In [164]:
X.sample(replace=False, frac=1.0)


Out[164]:
x y z px py pz x1 y1 z1 px1 ... z23 px23 py23 pz23 x24 y24 z24 px24 py24 pz24
52641 -17.403500 -21.669100 65.0 0.018850 -0.070221 2.808130 -17.098200 -24.305200 176.944 -0.004557 ... 341.280 -0.018001 -0.067943 2.800710 -18.208300 -28.138900 343.405 -0.017034 -0.068414 2.800680
153188 -2.030520 7.950830 65.0 0.140483 0.195651 2.262990 6.209280 16.391700 176.944 0.191073 ... 341.280 0.224867 0.054636 2.249030 22.275300 23.367100 343.405 0.224612 0.054472 2.249040
26736 3.743760 -20.476100 65.0 -0.016751 -0.022347 0.948275 0.072519 -22.697500 176.944 -0.041323 ... 341.280 -0.020188 0.031900 0.936075 -6.918930 -20.234000 343.405 -0.018757 0.031732 0.936098
113795 -8.214490 12.470600 65.0 -0.127324 -0.017557 2.029990 -15.177600 12.624900 176.944 -0.126454 ... 341.280 -0.105276 0.072318 2.020760 -24.585400 17.047300 343.405 -0.105614 0.071741 2.020650
72875 5.645860 -16.347100 65.0 0.062687 -0.018573 0.946145 11.775000 -20.705000 176.944 0.038080 ... 341.280 -0.015396 -0.050101 0.934989 12.384100 -31.306800 343.405 -0.014956 -0.049401 0.935021
86023 20.605700 16.160900 65.0 -0.026888 -0.161448 3.392590 19.447100 10.757900 176.944 -0.046720 ... 341.280 -0.093216 -0.140715 3.382280 16.019200 2.902030 343.405 -0.093604 -0.139912 3.382290
8168 0.933000 3.174000 65.0 -0.361000 0.165000 2.568000 -13.749000 11.939000 176.944 -0.305000 ... 0.000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000 0.000000 0.000000 0.000000
106263 1.880180 -0.830898 65.0 0.044663 0.153992 2.434700 4.864360 5.857340 176.944 0.084237 ... 341.280 0.121784 0.095721 2.427940 12.198400 13.665300 343.405 0.121635 0.095944 2.427920
9991 3.837000 4.565000 65.0 0.101000 0.220000 2.121000 10.987000 14.983000 176.944 0.160000 ... 341.280 0.209000 0.084000 2.081000 0.000000 0.000000 343.405 0.000000 0.000000 0.000000
68866 -0.303658 -3.715340 65.0 -0.108675 0.007009 1.478080 -8.248970 -1.828120 176.944 -0.098208 ... 341.280 -0.052205 0.090764 1.471280 -16.913500 6.678260 343.405 -0.051851 0.090275 1.471310
162764 -6.776190 1.626970 65.0 0.064371 -0.270687 1.690320 -5.811210 -16.805000 176.944 -0.039615 ... 341.280 -0.168750 -0.209706 1.676150 -17.422700 -40.954300 343.405 -0.167680 -0.209256 1.676290
774 21.037000 -9.719000 65.0 -0.011000 0.004000 0.850000 19.240000 -9.999000 176.944 -0.020000 ... 0.000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000 0.000000 0.000000 0.000000
100949 -5.384190 1.101730 65.0 -0.059642 0.092779 1.522140 -8.278280 8.428950 176.944 -0.019525 ... 341.280 0.040779 0.089976 1.512700 -6.462910 19.579000 343.405 0.040538 0.089499 1.512720
23283 -1.805330 3.313940 65.0 -0.005423 -0.090316 0.877848 -6.018140 -6.859880 176.944 -0.059200 ... 341.280 -0.078196 0.017300 0.875210 -21.228700 -10.719800 343.405 -0.077756 0.017536 0.875228
179305 9.360590 -23.326300 65.0 0.022421 -0.201066 2.440920 9.070760 -32.725100 176.944 -0.038030 ... 341.280 -0.104136 -0.170012 2.433520 3.381050 -45.760300 343.405 -0.102666 -0.169084 2.433620
111277 -20.514700 -14.330300 65.0 0.234283 0.376331 3.267670 -11.414500 -2.153730 176.944 0.296662 ... 341.280 0.373024 0.232747 3.257600 5.947730 12.142200 343.405 0.373101 0.231894 3.257630
106579 13.228300 -13.170400 65.0 -0.031664 -0.011224 1.213490 9.875610 -13.559900 176.944 -0.037209 ... 341.280 -0.018397 0.034510 1.191770 5.363270 -10.780300 343.405 -0.017366 0.035017 1.191750
88202 -2.053080 -4.097340 65.0 0.289073 0.382275 3.274180 9.089380 8.043670 176.944 0.360944 ... 341.280 0.431269 0.208433 3.263760 29.721000 21.203500 343.405 0.431292 0.207628 3.263780
185546 -0.318578 -2.134700 65.0 0.052389 -0.065996 1.007470 2.932300 -10.655600 176.944 0.001524 ... 341.280 -0.059951 -0.050958 0.999429 -3.284250 -22.720700 343.405 -0.059603 -0.050474 0.999452
125897 -2.211560 -2.917060 65.0 0.114072 0.183202 3.330900 2.142510 2.890880 176.944 0.145972 ... 341.280 0.180823 0.115266 3.328730 10.493200 9.698040 343.405 0.180663 0.115292 3.328720
74084 18.407500 1.040740 65.0 -0.036470 -0.032187 1.080980 14.170800 -2.607470 176.944 -0.049443 ... 341.280 -0.050833 0.021609 1.072140 5.798510 -3.415670 343.405 -0.050828 0.022229 1.072110
107710 2.824960 5.463330 65.0 0.081918 0.231546 2.480860 7.677970 15.053300 176.944 0.130040 ... 341.280 0.184727 0.133392 2.463400 18.867500 25.979500 343.405 0.184268 0.133557 2.463310
132778 19.829100 13.255400 65.0 -0.038228 -0.029251 1.661550 17.082900 11.449700 176.944 -0.042207 ... 341.280 -0.052785 0.002584 1.653160 12.412700 9.914800 343.405 -0.053269 0.003547 1.653120
70579 -2.730520 16.684700 65.0 0.017456 0.006928 0.765720 1.250240 16.442900 176.944 0.029360 ... 341.280 -0.009263 -0.031761 0.759890 4.502360 10.430600 343.405 -0.010347 -0.031359 0.759874
84800 -24.755800 -16.319500 65.0 -0.054745 -0.064651 1.428420 -30.142100 -19.852100 176.944 -0.079429 ... 341.280 -0.066961 0.022830 1.420580 -39.765400 -18.717500 343.405 -0.067480 0.022603 1.420550
139445 -10.397700 -22.575400 65.0 0.020043 0.000722 2.870850 -9.890900 -22.307600 176.944 0.008114 ... 341.280 0.016643 0.004904 2.860890 -9.477660 -21.760600 343.405 0.017740 0.004800 2.860860
12426 -4.461380 -12.838000 65.0 -0.096581 -0.048777 1.291250 -13.909600 -14.912500 176.944 -0.118208 ... 341.280 -0.079925 0.075036 1.278840 -27.980400 -8.658770 343.405 -0.078162 0.074905 1.278940
61559 22.432000 -10.133700 65.0 -0.230991 0.076134 2.024080 10.247400 -4.411110 176.944 -0.204290 ... 341.280 -0.124910 0.207897 2.009860 -3.490210 9.975150 343.405 -0.124311 0.208228 2.009850
152411 -0.003170 0.195458 65.0 -0.000435 -0.000134 2.755740 0.012984 0.110794 176.949 0.000409 ... 341.285 -0.001587 -0.003642 2.751290 0.007275 -0.056116 343.410 -0.001506 -0.003721 2.751280
90084 -28.119100 5.460000 65.0 -0.008808 0.050725 1.682910 -27.855800 9.152940 176.944 0.016379 ... 341.280 0.039728 0.027348 1.674740 -24.622800 13.900900 343.405 0.038104 0.024956 1.674800
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
84667 2.689220 -28.605700 65.0 -0.254641 0.386541 3.413680 -4.703460 -15.351700 176.944 -0.190530 ... 341.280 -0.065938 0.458481 3.402140 -10.971000 6.509650 343.405 -0.064588 0.458157 3.402190
90539 0.221192 18.097200 65.0 -0.155071 -0.168101 1.320750 -15.749000 7.866840 176.944 -0.212274 ... 341.280 -0.200731 0.081879 1.314180 -42.894700 10.290900 343.405 -0.200652 0.081686 1.314190
173733 2.465190 0.058358 65.0 0.083595 -0.417077 2.540330 3.865870 -18.548600 176.944 -0.022112 ... 341.280 -0.165117 -0.381430 2.533340 -3.004110 -45.210100 343.405 -0.165279 -0.380250 2.533480
39702 -8.104120 10.907500 65.0 0.533613 -0.211430 3.409400 8.791300 2.626460 176.944 0.490080 ... 341.280 0.389674 -0.413508 3.404430 30.259300 -15.231600 343.405 0.389461 -0.413379 3.404450
194861 0.055056 0.094431 65.0 -0.046809 0.049433 0.769948 -3.531010 9.374300 176.944 0.001382 ... 341.280 0.064795 0.015442 0.752946 6.389850 20.366700 343.405 0.064051 0.015007 0.753004
103733 -10.395400 -0.683712 65.0 -0.010013 -0.226165 1.682020 -13.691500 -15.092500 176.944 -0.088697 ... 341.280 -0.176224 -0.133804 1.674640 -27.849300 -31.697000 343.405 -0.175609 -0.133995 1.674670
149517 -9.186860 -27.594300 65.0 -0.108609 -0.078727 1.377480 -19.796800 -31.232400 176.944 -0.145766 ... 341.280 -0.107092 0.079054 1.369070 -36.680100 -25.964900 343.405 -0.104972 0.077933 1.369280
99419 3.563040 -21.694300 65.0 0.100547 -0.149252 2.086780 7.513040 -30.632000 176.944 0.044049 ... 341.280 -0.034151 -0.176998 2.076390 7.154490 -45.328200 343.405 -0.032988 -0.176680 2.076420
41557 -27.184300 -5.035790 65.0 0.179608 0.419669 3.281330 -19.758200 8.956280 176.944 0.254547 ... 341.280 0.357109 0.305586 3.267220 -3.582210 27.122800 343.405 0.356950 0.303920 3.267380
94665 2.699820 -2.211540 65.0 0.046064 -0.310255 3.391450 3.287070 -12.526800 176.944 -0.008451 ... 341.280 -0.087813 -0.298279 3.372270 0.565181 -27.771700 343.405 -0.087300 -0.297890 3.372300
51548 0.325248 6.895720 65.0 -0.127110 0.202310 3.279360 -3.271570 14.060300 176.944 -0.075643 ... 341.280 -0.013623 0.231354 3.246960 -5.315550 25.773800 343.405 -0.013817 0.231177 3.246950
118805 0.404166 -3.537550 65.0 0.162770 0.058662 2.252980 8.662210 -1.833260 176.944 0.168637 ... 341.280 0.158033 -0.054185 2.248440 20.839100 -3.923670 343.405 0.157975 -0.054099 2.248430
13750 9.255830 -24.548300 65.0 -0.169851 0.352710 2.939570 3.974210 -10.929600 176.944 -0.101787 ... 341.280 0.019941 0.378770 2.926730 1.781480 10.393100 343.405 0.021234 0.379389 2.926630
108655 5.985100 -0.480394 65.0 0.169454 -0.272131 2.706730 11.662200 -12.540000 176.944 0.101571 ... 341.280 -0.007424 -0.316497 2.684300 14.096900 -32.368300 343.405 -0.007295 -0.316072 2.684330
55120 3.038330 1.345630 65.0 0.061796 0.066582 0.678214 17.533900 6.682690 176.944 0.101378 ... 341.280 0.001406 -0.086809 0.668906 32.316700 -10.926000 343.405 0.000737 -0.085345 0.669059
106771 -0.013947 -0.025127 65.0 -0.001486 -0.014473 1.339990 -0.583045 -1.203970 176.949 -0.009832 ... 341.285 -0.015039 -0.001861 1.334370 -2.354610 -2.052180 343.410 -0.014944 -0.001862 1.334350
173120 -9.988740 2.818220 65.0 -0.021440 0.012570 2.089030 -10.981100 3.793510 176.944 -0.018451 ... 341.280 -0.007395 0.025425 2.082950 -11.957600 5.981920 343.405 -0.007293 0.024869 2.082930
105213 16.954800 -19.160600 65.0 0.008842 -0.115890 2.240690 16.545900 -25.336100 176.944 -0.029521 ... 341.280 -0.071935 -0.098546 2.227310 11.995400 -34.304500 343.405 -0.070822 -0.097757 2.227370
115152 11.868200 5.808590 65.0 0.019684 0.031704 0.961732 15.349000 8.096660 176.944 0.041201 ... 341.280 0.020904 -0.024226 0.957033 21.525400 4.952710 343.405 0.020439 -0.023539 0.957047
123169 17.387700 -17.390300 65.0 -0.122463 0.006681 1.322680 7.193350 -14.763700 176.944 -0.115879 ... 341.280 -0.045694 0.118947 1.316620 -3.586070 -3.026220 343.405 -0.044501 0.119156 1.316610
54520 18.274600 22.445900 65.0 0.001637 0.057846 1.422390 19.748500 26.404400 176.944 0.036400 ... 341.280 0.044200 0.015554 1.406570 25.668100 29.392400 343.405 0.042735 0.016625 1.406580
189619 -19.121000 -21.110900 65.0 -0.011307 0.125346 0.974542 -16.780700 -6.439700 176.944 0.054798 ... 341.280 0.125816 0.014268 0.961904 0.914709 6.021310 343.405 0.125868 0.013488 0.961892
72881 -1.030320 0.342606 65.0 -0.085734 0.053241 1.032770 -7.946130 8.346990 176.944 -0.032841 ... 341.280 0.046426 0.073594 1.013870 -5.381690 22.881500 343.405 0.045923 0.072841 1.013930
128202 -0.229217 2.634270 65.0 0.080086 -0.067987 1.858030 3.891180 -2.190490 176.944 0.054233 ... 341.280 0.006015 -0.102249 1.857260 6.326330 -11.158300 343.405 0.006265 -0.102354 1.857220
53259 11.155500 6.533490 65.0 0.160172 0.074589 1.579250 23.550200 9.400140 176.944 0.186249 ... 341.280 0.149452 -0.094494 1.570580 42.036600 3.401820 343.405 0.148595 -0.093979 1.570680
91301 16.311700 14.807100 65.0 0.012096 0.091521 2.377860 17.399400 18.855100 176.944 0.034647 ... 341.280 0.052481 0.059840 2.368940 20.904100 23.296100 343.405 0.051644 0.060250 2.368940
30603 19.594000 -0.110574 65.0 0.190400 -0.214448 1.726000 29.076900 -16.226100 176.944 0.097309 ... 341.275 -0.052021 -0.260531 1.698040 30.118600 -44.493200 343.400 -0.049557 -0.258858 1.697100
108185 -0.585386 -16.813200 65.0 0.001635 -0.023701 0.580557 -3.525380 -21.034800 176.944 -0.030823 ... 341.280 -0.003167 0.031177 0.569555 -11.945000 -16.778400 343.405 -0.002068 0.031175 0.569542
189362 7.449150 -24.089400 65.0 0.134992 0.347943 2.403750 15.420800 -8.895640 176.944 0.207468 ... 341.280 0.306180 0.203148 2.397850 33.575900 8.200400 343.405 0.306837 0.203232 2.397740
63119 5.085550 5.628470 65.0 0.277390 -0.005793 3.394690 14.142700 4.361040 176.944 0.262676 ... 341.280 0.232207 -0.126603 3.371460 26.424900 -0.715901 343.405 0.231730 -0.126050 3.371490

204601 rows × 150 columns


In [ ]: