In [6]:
import pyspark as ps
import numpy as np
import pyspark.sql.functions as F

In [3]:
spark = (
    ps.sql.SparkSession.builder
    # .master("local[8]")
    .appName("LatentFeatureExploration")
    .getOrCreate()
)

In [4]:
# Load restaurant metadata
restaurants_df = spark.read.parquet('../data/restaurants')

# Load restaurant discount factors
discount_factor_df = spark.read.parquet('../data/discount_factor')

# Build a mapping from restaurant id to the model's integer item index
restaurant_id_map = []
with open('../data/product_labels.txt') as f:
    for index, line in enumerate(f):
        restaurant_id_map.append((line.strip(), index))

restaurant_id_map_df = spark.createDataFrame(restaurant_id_map, ['id', 'item'])

restaurants_with_id_df = restaurants_df.join(restaurant_id_map_df, on='id')

# Load avg_rating that was calculated from the model's training data
avg_rating_df = spark.read.parquet('../data/avg_rating')

# Load item_bias that was calculated from the model's training data
item_bias_df = spark.read.parquet('../data/item_bias')

# Load item_factors that were generated by the model from the training data
item_factors_df = spark.read.parquet('../data/item_factors')
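
As a quick sanity check on the loaded data (a sketch added here; it was not part of the original run), the schemas and row counts can be inspected before any analysis. The calls below only rely on the DataFrames and columns already referenced above.

In [ ]:
# Hedged sanity check: confirm the id join preserved rows and the expected columns exist.
restaurants_with_id_df.printSchema()
print(restaurants_df.count(), restaurants_with_id_df.count())
item_factors_df.printSchema()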

In [7]:
def get_item_factors_and_ids():
    """Collect the model's item-factor vectors and matching item ids as NumPy arrays."""
    item_factors = []
    item_ids = []
    for row in item_factors_df.collect():
        item_factors.append(row['features'])
        item_ids.append(row['id'])
    return np.array(item_factors), np.array(item_ids)


# Set up item_factors and item_ids; these are static, so they only need to be loaded once.
item_factors, item_ids = get_item_factors_and_ids()
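
The analysis below treats positional indices into item_factors (from argmin/argmax) as the model's integer item indices, which assumes the collected rows come back ordered by id. A minimal hedged check of that assumption (not part of the original run):

In [ ]:
# Hedged check: argmin/argmax below return row positions in item_factors, and those
# positions are later matched against the integer 'item' index, so row position is
# assumed to equal the item id; eyeball the collected ids to confirm they run 0..n-1.
assert item_factors.shape[0] == item_ids.shape[0]
print(item_ids[:5], item_ids[-5:])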

In [26]:
print(item_factors.shape)
latent_feature_min_indices = np.argmin(item_factors, axis=0)
latent_feature_max_indices = np.argmax(item_factors, axis=0)
print(latent_feature_min_indices)
print(latent_feature_max_indices)
print(np.mean(item_factors, axis=0))
print(np.std(item_factors, axis=0))
# Look up the restaurants at the extreme (argmin/argmax) item indices found above
extreme_items = np.union1d(latent_feature_min_indices, latent_feature_max_indices)
restaurants_with_id_df.filter(F.col('item').isin([int(i) for i in extreme_items]))


(5063, 76)
[2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507
 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507
 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507
 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507
 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507 2507
 2507]
[5052 5052 5052 5052 5052 5052 5052 5052 5052 5052 2523 5052 5052 5052 2523
 2523 2523 5052 5052 2523 2523 5052 5052 5052 5052 5052 5052 5052 5052 5052
 5052 5052 2523 5052 5052 5052 5052 2523 5052 2523 5052 2523 5052 5052 5052
 5052 5052 5052 5052 5052 5052 5052 5052 5052 5052 2523 5052 5052 5052 5052
 5052 5052 5052 5052 2523 5052 5052 2523 2523 5052 2523 5052 5052 2523 5052
 5052]
[ 0.23230369  0.1813703   0.2409327   0.22594382  0.23874373  0.22225988
  0.22790824  0.24041144  0.26794456  0.2626804   0.24423099  0.18504971
  0.26034253  0.18649004  0.23455758  0.1732272   0.21123273  0.27664771
  0.23960582  0.29203717  0.26304475  0.23484258  0.18959452  0.24822531
  0.18454424  0.26344167  0.17946572  0.15693014  0.20254725  0.24624483
  0.14534585  0.23200018  0.24296027  0.24945348  0.27276554  0.20093754
  0.18396034  0.25870621  0.21838084  0.2187459   0.26783532  0.29531959
  0.22514799  0.2050532   0.22375954  0.21989655  0.24658956  0.20435621
  0.21890442  0.21140154  0.26982781  0.29345674  0.18865796  0.22181566
  0.17944633  0.2453817   0.23561451  0.29259167  0.22940577  0.18464882
  0.26329443  0.24673129  0.22234019  0.27967645  0.19322274  0.27555731
  0.26870162  0.21946027  0.25240019  0.35142462  0.19551784  0.32451395
  0.24726986  0.22979207  0.26841631  0.22908704]
[ 0.00974863  0.00919899  0.01340619  0.00967637  0.01009312  0.00937962
  0.00962972  0.01129337  0.01131177  0.01299049  0.01362863  0.0088832
  0.01072776  0.00924755  0.01199383  0.00942957  0.01166795  0.0114316
  0.01218659  0.01964124  0.01473186  0.00986773  0.00824201  0.01036032
  0.01269012  0.01208147  0.009158    0.0066376   0.00842509  0.01185779
  0.00630077  0.01668751  0.01647148  0.01211589  0.01369138  0.00886811
  0.00789277  0.01742021  0.00933451  0.01353953  0.01320364  0.01693771
  0.00927361  0.00854578  0.01256443  0.01103401  0.01068507  0.00890835
  0.01376875  0.01364582  0.01259555  0.01628691  0.00896676  0.00919429
  0.00754338  0.01580886  0.01158205  0.01318345  0.01213417  0.00796628
  0.01113059  0.01014678  0.00955555  0.01576926  0.01042589  0.01239077
  0.01130084  0.01117734  0.01497452  0.02186182  0.0103539   0.01565389
  0.01222761  0.01424172  0.01103993  0.01099869]
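
One thing that stands out in the output is that a single row (2507) is the argmin of every one of the 76 latent features, while the argmax is concentrated in rows 5052 and 2523. A quick follow-up sketch (added here, not part of the original run) compares those items' factor-vector norms with the overall distribution, to see whether row 2507 is simply a near-zero vector:

In [ ]:
# Compare the extreme rows' factor-vector norms against the population.
# Indices 2507, 2523 and 5052 come from the argmin/argmax output above.
norms = np.linalg.norm(item_factors, axis=1)
for idx in (2507, 2523, 5052):
    print(idx, norms[idx])
print('mean norm:', norms.mean(), 'min norm:', norms.min(), 'max norm:', norms.max())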

In [ ]: