
Commit

all lab
kaieye committed Jul 31, 2022
1 parent 5ddd1b4 commit 21da1d0
Showing 62 changed files with 202,795 additions and 0 deletions.

@@ -0,0 +1 @@
0.23609531,0.35653394,0.12819964,0.3155024,0.23636407,-0.22917777,-0.14401537,0.075727165,-0.37003124,-0.021354377,-0.01885897,-0.016493738,0.069341004,-0.4371599,-0.24480343,0.29720962,-0.3823989,0.37802482,0.44692165,-0.024649143,-0.499193,-0.4379481,-0.27089447,-0.23521835,0.35863072,-0.33172983,-0.319295,0.33025116,0.06182003,0.1360473,-0.16637295,-0.07102251,0.05386877,-0.34516948,-0.07712108,-0.43622762,0.34539694,-0.008741677,0.3759488,0.2825104,-0.14146823,-0.4804005,-0.40585357,0.2456305,-0.49492687,0.10903448,0.1771251,0.056036294,0.1348601,-0.25175112,0.11369991,0.48754352,0.24302799,0.20360255,0.066055596,0.13253677,0.09305513,0.2777846,0.023716927,0.10299754,-0.43873072,0.3180564,0.11554575,-0.45647788,-0.1157915,-0.20613712,-0.3222394,0.0670712,0.1316781,-0.04797685,0.16459042,0.265431,-0.44394213,-0.11467117,0.42816055,0.11962229,-0.18784195,0.23876792,0.14463365,-0.054173946,-0.084650755,-0.022121489,-0.016997159,-0.478563,0.19013703,0.14574134,-0.36160725,0.12130946,0.28853047,-0.029776871,-0.06522834,-0.4273659,-0.075282395,-0.15572357,0.011092424,0.3204555,0.24933082,0.11092675,0.099162936,0.066720545,-0.24027056,-0.15710282,-0.17818177,-0.36245525,-0.113695264,0.24532175,0.15722138,0.031137764,-0.27763367,0.16367197,-0.28439033,0.46277177,0.29739505,0.27828717,0.019126952,0.11723554,-0.22481555,-0.30280328,0.34909534,-0.24122453,-0.16898203,-0.14903909,0.26694208,0.42356133,0.34913808,-0.03734243,0.10106343,-0.17388654,-0.49831563,0.07820094,-0.15342546,-0.36590004,0.1406846,-0.47188687,0.17444587,-0.095620334,-0.35815257,0.35699356,-0.2095989,0.027381063,-0.38910145,0.11574459,-0.14782763,-0.39063984,0.4066319,-0.32036752,-0.1109702,0.39456856,0.37689888,0.39321297,0.3459376,0.006568253,-0.11964077,0.49896598,-0.24059612,0.10720742,0.22555989,-0.28566742,-0.3495056,-0.4036088,-0.14572406,0.36242604,-0.27781355,-0.35440212,0.02017504,-0.014872491,-0.45937228,-0.46679926,-0.24539918,0.250834,0.48240495,-0.12271291,-0.18530989,0.4877388,-0.42409772,-0.17774671,-0.23274487,-0.20638466,0.10630572,-0.23506796,0.41716546,-0.48773384,0.34115142,-0.19853532,0.13483697,0.48814183,0.365838,0.167292,0.26261747,0.048885167,-0.053166866,-0.09033108,0.004328966,-0.07519597,-0.0074519515,-0.18805629,0.08991516,-0.074141085,0.011850238,0.0032178164,-0.13788843,-0.15182257,0.23794007,-0.44536138,-0.4325565,0.04964775,-0.39575815,0.13207555,0.46450144,0.48699808,0.0982306,-0.24818414,-0.43989497,-0.17733443,0.44720662,-0.4877941,0.32816106,0.1520955,-0.36473483,0.41389132,0.38561857,0.3391484,0.24316305,-0.34090698,0.18459034,0.22107542,0.46749824,0.24527848,-0.015088022,-0.19061911,0.39714372,0.17129338,-0.4565462,0.29424638,0.112273395,0.029050827,0.24344242,0.07746142,0.09415382,0.32288378,-0.12956685,-0.4886583,-0.28763622,-0.13127446,0.06799567,0.19962704,0.30296177,0.36935085,-0.09190476,0.26250762,0.40036052,-0.2743777,-0.49375767,-0.069624245,-0.29795706,0.051351905,0.025731027,0.3185677,0.1474306,0.29947352,0.07307166,0.19167346,-0.103982925,0.18090385,0.30783385,0.13168758,-0.28889263,-0.2867632,-0.14057803,-0.4823867,0.031248987,-0.3843631,-0.15378278,-0.2224276,0.117133915,-0.17432636,-0.4389227,-0.1263656,-0.19403148,0.07353908,-0.4833243,0.4543454,0.32979268,0.020353973,0.43283933,0.32527155,0.121165454,-0.19067067,-0.31272715,0.2764269,-0.1997847,-0.49005002,-0.13874072,-0.24505162,0.4553625,0.4179442,-0.042735636,-0.08790296,0.041137338,0.41393095,0.15213323,-0.09905428,0.13102758,0.12146121,0.1455949,-0.4549449,0.3422228,0.19875216,0.33838552,-0.31526864,0.15791327,-0.048859477,0.38249975,-0.2259838,0.4335224,0.091159284,0.1873228,-0.4408388,0.13642609,0.2911085,-0.4142604,0.36956507,0.17642796,-0.05092746,0.4400813,0.2210195,0.19256228,0.40831757,0.2089485,-0.2952068,-0.46408284,-0.024006605,0.09706116,0.30689007,-0.20083755,-0.065697074,-0.007901192,0.33029783,-0.04697472,-0.301004,-0.03436923,-0.18052393,0.205028,0.17713094,-0.2915638,0.13628459,0.18889505,-0.10857934,-0.105745375,-0.30238467,0.27625644,0.36994237,0.3764261,0.2511189,-0.30040634,0.17628974,-0.41685903,-0.109700084,-0.18728197,0.28934503,0.49773997,0.08808029,-0.110871136,0.03448701,-0.27818608,0.34542423,-0.37529272,-0.2853775,-0.26411527,0.41159993,-0.1375072,0.4988646,0.19680327,0.4818679,0.15024763,0.12502313,0.25538224,0.3805648,0.27491546,0.15169483,-0.30213797,0.11057246,0.37918144,-0.09437716,0.13442796,-0.019827485,-0.43407947,0.26273787,0.22879243,0.26459223,0.4303277,0.25444216,0.4908933,0.41282868,-0.097350895,-0.39770204,-0.2710728,0.3187216,0.49455476,-0.33418065,-0.22715044,-0.11189991,-0.37946326,0.17442083,0.1970753,0.3409006,0.27558124,-0.20686197,0.3802954,0.0137351155,-0.1563006,0.48745012,-0.032333493,0.11390352,-0.40698427,-0.21574324,0.27934504,0.05980569,0.3466866,0.07286084,0.35323143,-0.2696011,0.29613405,0.09769893,0.24693,0.26872206,-0.41006708,-0.04926592,-0.44144148,-0.41583318,0.15290171,0.068596244,0.10710317,-0.17657506,-0.47332853,-0.36171782,0.08478701,0.42955917,-0.40508574,0.45839047,0.4126075,-0.36223966,-0.3201397
@@ -0,0 +1,78 @@
import numpy as np

def test_cofi_cost_func(target):
    # Regularization-only case: Y and R are all zeros, so only the
    # regularization term contributes to the cost.
    num_users_r = 4
    num_movies_r = 5
    num_features_r = 3

    X_r = np.ones((num_movies_r, num_features_r))
    W_r = np.ones((num_users_r, num_features_r))
    b_r = np.zeros((1, num_users_r))
    Y_r = np.zeros((num_movies_r, num_users_r))
    R_r = np.zeros((num_movies_r, num_users_r))

    J = target(X_r, W_r, b_r, Y_r, R_r, 2)
    assert not np.isclose(J, 13.5), f"Wrong value. Got {J}. Did you multiply the regularization term by lambda_?"
    assert np.isclose(J, 27), f"Wrong value. Expected {27}, got {J}. Check the regularization term"

    # All-ones case with lambda_ = 0: only the squared-error term contributes.
    X_r = np.ones((num_movies_r, num_features_r))
    W_r = np.ones((num_users_r, num_features_r))
    b_r = np.ones((1, num_users_r))
    Y_r = np.ones((num_movies_r, num_users_r))
    R_r = np.ones((num_movies_r, num_users_r))

    # Evaluate cost function
    J = target(X_r, W_r, b_r, Y_r, R_r, 0)

    assert np.isclose(J, 90), f"Wrong value. Expected {90}, got {J}. Check the term without the regularization"

    # Y all zeros with lambda_ = 0: larger squared error, still no regularization.
    X_r = np.ones((num_movies_r, num_features_r))
    W_r = np.ones((num_users_r, num_features_r))
    b_r = np.ones((1, num_users_r))
    Y_r = np.zeros((num_movies_r, num_users_r))
    R_r = np.ones((num_movies_r, num_users_r))

    # Evaluate cost function
    J = target(X_r, W_r, b_r, Y_r, R_r, 0)

    assert np.isclose(J, 160), f"Wrong value. Expected {160}, got {J}. Check the term without the regularization"

    # All-ones case with lambda_ = 1: squared-error term plus regularization.
    X_r = np.ones((num_movies_r, num_features_r))
    W_r = np.ones((num_users_r, num_features_r))
    b_r = np.ones((1, num_users_r))
    Y_r = np.ones((num_movies_r, num_users_r))
    R_r = np.ones((num_movies_r, num_users_r))

    # Evaluate cost function
    J = target(X_r, W_r, b_r, Y_r, R_r, 1)

    assert np.isclose(J, 103.5), f"Wrong value. Expected {103.5}, got {J}. Check both the squared-error and the regularization terms"

    # Fixed pseudo-random case with lambda_ = 3.
    num_users_r = 3
    num_movies_r = 4
    num_features_r = 4

    #np.random.seed(247)
    X_r = np.array([[0.36618032, 0.9075415, 0.8310605, 0.08590986],
                    [0.62634721, 0.38234325, 0.85624346, 0.55183039],
                    [0.77458727, 0.35704147, 0.31003294, 0.20100006],
                    [0.34420469, 0.46103436, 0.88638208, 0.36175401]])  # np.random.rand(num_movies_r, num_features_r)
    W_r = np.array([[0.04786854, 0.61504665, 0.06633146, 0.38298908],
                    [0.16515965, 0.22320207, 0.89826005, 0.14373251],
                    [0.1274051 , 0.22757303, 0.96865613, 0.70741111]])  # np.random.rand(num_users_r, num_features_r)
    b_r = np.array([[0.14246472, 0.30110933, 0.56141144]])  # np.random.rand(1, num_users_r)
    Y_r = np.array([[0.20651685, 0.60767914, 0.86344527],
                    [0.82665019, 0.00944765, 0.4376798 ],
                    [0.81623732, 0.26776794, 0.03757507],
                    [0.37232161, 0.19890823, 0.13026598]])  # np.random.rand(num_movies_r, num_users_r)
    R_r = np.array([[1, 0, 1], [1, 0, 0], [1, 0, 0], [0, 1, 0]])  # (np.random.rand(num_movies_r, num_users_r) > 0.4) * 1

    # Evaluate cost function
    J = target(X_r, W_r, b_r, Y_r, R_r, 3)

    assert np.isclose(J, 13.621929978531858, atol=1e-8), f"Wrong value. Expected {13.621929978531858}, got {J}."

    print('\033[92mAll tests passed!')
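
For reference, here is a minimal sketch of the kind of cost function this test targets, assuming the standard collaborative-filtering cost J = 1/2 * sum over (i,j) with R(i,j)=1 of (w(j)·x(i) + b(j) - y(i,j))^2 + lambda_/2 * (sum(W^2) + sum(X^2)). The name cofi_cost_func and this fully vectorized form are illustrative assumptions, not the notebook's own graded implementation.

import numpy as np

def cofi_cost_func(X, W, b, Y, R, lambda_):
    # Squared error, counted only where a rating exists (R[i, j] == 1)
    err = (np.matmul(X, W.T) + b - Y) * R
    J = 0.5 * np.sum(err ** 2)
    # Regularization over both parameter matrices, scaled by lambda_
    J += (lambda_ / 2) * (np.sum(np.square(W)) + np.sum(np.square(X)))
    return J

Calling test_cofi_cost_func(cofi_cost_func) with a function of this shape should reproduce the expected values above (27, 90, 160, 103.5, and 13.6219...).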

@@ -0,0 +1,49 @@
import numpy as np
import pandas as pd
from numpy import loadtxt

def normalizeRatings(Y, R):
    """
    Preprocess data by subtracting the mean rating for every movie (every row).
    Only real ratings, those with R(i,j) = 1, are included in the mean.
    [Ynorm, Ymean] = normalizeRatings(Y, R) normalizes Y so that each movie
    has a rating of 0 on average. Unrated movies then have a mean rating of 0.
    Returns the mean rating in Ymean.
    """
    Ymean = (np.sum(Y * R, axis=1) / (np.sum(R, axis=1) + 1e-12)).reshape(-1, 1)
    Ynorm = Y - np.multiply(Ymean, R)
    return Ynorm, Ymean

def load_precalc_params_small():
    # Pre-computed parameters for the small movie-ratings dataset
    file = open('./data/small_movies_X.csv', 'rb')
    X = loadtxt(file, delimiter=",")

    file = open('./data/small_movies_W.csv', 'rb')
    W = loadtxt(file, delimiter=",")

    file = open('./data/small_movies_b.csv', 'rb')
    b = loadtxt(file, delimiter=",")
    b = b.reshape(1, -1)
    num_movies, num_features = X.shape
    num_users, _ = W.shape
    return X, W, b, num_movies, num_features, num_users

def load_ratings_small():
    file = open('./data/small_movies_Y.csv', 'rb')
    Y = loadtxt(file, delimiter=",")

    file = open('./data/small_movies_R.csv', 'rb')
    R = loadtxt(file, delimiter=",")
    return Y, R

def load_Movie_List_pd():
    """Returns a list of movie titles and a DataFrame indexed by movie, in the order the movies appear in the Y matrix."""
    df = pd.read_csv('./data/small_movie_list.csv', header=0, index_col=0, delimiter=',', quotechar='"')
    mlist = df["title"].to_list()
    return mlist, df
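
A short usage sketch tying these helpers together — it assumes the ./data CSV files from this commit are available on disk and reuses the illustrative cofi_cost_func sketched after the test above; the lambda_ value of 1.5 is arbitrary:

# Load the small ratings matrices and pre-computed parameters,
# mean-normalize the ratings, and evaluate the cost once.
X, W, b, num_movies, num_features, num_users = load_precalc_params_small()
Y, R = load_ratings_small()
Ynorm, Ymean = normalizeRatings(Y, R)
J = cofi_cost_func(X, W, b, Ynorm, R, 1.5)
print(f"Cost with pre-trained parameters (lambda_ = 1.5): {J:0.2f}")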




