franckjay / torch_forward.py
Created March 25, 2024 02:59
Forward call for multiple user and item embeddings
def forward(self, x, u_cats, i_cats):
    """
    Forward pass
    :param x: Float Tensor
    :param u_cats: User index tensor
    :param i_cats: Item index tensor
    :return: Predictions for this batch
    """
    curr_batch_size = len(u_cats)
    # Take User and Item embeddings for each value
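The preview cuts off at the embedding lookups. A minimal sketch of how the rest of such a module could look, assuming hypothetical user_embs/item_embs embedding layers and a small MLP head (these names and sizes are not from the gist):

import torch
import torch.nn as nn

class MultiEmbeddingModel(nn.Module):
    # Hypothetical module matching the forward() signature above
    def __init__(self, n_users, n_items, n_float_feats, emb_dim=16):
        super().__init__()
        self.user_embs = nn.Embedding(n_users, emb_dim)
        self.item_embs = nn.Embedding(n_items, emb_dim)
        self.mlp = nn.Sequential(
            nn.Linear(n_float_feats + 2 * emb_dim, 32),
            nn.ReLU(),
            nn.Linear(32, 1),
        )

    def forward(self, x, u_cats, i_cats):
        curr_batch_size = len(u_cats)
        # Take User and Item embeddings for each value
        u_emb = self.user_embs(u_cats).view(curr_batch_size, -1)
        i_emb = self.item_embs(i_cats).view(curr_batch_size, -1)
        # Concatenate the float features with both embedding vectors
        stacked = torch.cat([x, u_emb, i_emb], dim=1)
        return self.mlp(stacked)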
franckjay / dataloader.py
Last active March 25, 2024 02:54
DataSet for PyTorch with multiple embeddings
class DictDataset(Dataset):
    def __init__(self, data_dict, norm_target=1, scaler=None):
        self.norm_target = norm_target
        self.data_df = build_pandas_ranking(data_dict)
        self.scaler = scaler
        # Build out the features that are continuous variables
        self.float_features = []
        for feat in self.data_df.columns:
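The preview ends inside the column loop. One plausible completion of the Dataset, treating the column names, the dtype check, and the __getitem__ layout as assumptions (build_pandas_ranking from the gist is replaced here by passing a ready DataFrame):

import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset

class DictDataset(Dataset):
    # Sketch: yields float features, user index, item index, and a scaled target
    def __init__(self, data_df, norm_target=1.0, scaler=None,
                 user_col="user_id", item_col="item_id", target_col="target"):
        self.data_df = data_df  # stands in for build_pandas_ranking(data_dict)
        self.norm_target = norm_target
        self.scaler = scaler
        self.user_col, self.item_col, self.target_col = user_col, item_col, target_col
        # Build out the features that are continuous variables
        self.float_features = [
            feat for feat in self.data_df.columns
            if feat not in (user_col, item_col, target_col)
            and pd.api.types.is_float_dtype(self.data_df[feat])
        ]
        if self.scaler is not None:
            self.data_df[self.float_features] = self.scaler.transform(
                self.data_df[self.float_features]
            )

    def __len__(self):
        return len(self.data_df)

    def __getitem__(self, idx):
        row = self.data_df.iloc[idx]
        x = torch.tensor(row[self.float_features].values.astype(np.float32))
        u = torch.tensor(int(row[self.user_col]), dtype=torch.long)
        i = torch.tensor(int(row[self.item_col]), dtype=torch.long)
        y = torch.tensor(float(row[self.target_col]) / self.norm_target)
        return x, u, i, y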
# Take our first user:
sparse.to_dense()[0]
>> tensor([1., 0., 0., 0., 1., 1.])
# Then multiply by B:
sparse.to_dense()[0]@B
>> tensor([ 0.9660, -0.0065, -0.0065, 0.0227, 0.9660, 0.9756])
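To turn those scores into recommendations, one option (not shown in the gists) is to mask items the user has already interacted with and take the top-k per user:

import torch

# Assumes `sparse` and `B` from the surrounding snippets are in scope
X = sparse.to_dense()                        # (n_users, n_items) interactions
scores = X @ B                               # predicted affinity for every pair
scores[X > 0] = float("-inf")                # hide already-seen items
top_items = scores.topk(k=2, dim=1).indices  # two suggestions per user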
B = P / (-1*P.diag())
B = B + torch.eye(B.shape[0])  # diag of P/(-diag) is -1, so adding I sets the diagonal to 0
>> tensor([
    [ 0.0000,  0.1296,  0.1296, -0.4540,  0.6806,  0.4878],
    [ 0.1296,  0.0000,  0.6806,  0.4878,  0.1296, -0.4540],
    [ 0.1296,  0.6806,  0.0000,  0.4878,  0.1296, -0.4540],
    [-0.2656,  0.2854,  0.2854,  0.0000, -0.2656,  0.9308],
    [ 0.6806,  0.1296,  0.1296, -0.4540,  0.0000,  0.4878],
    [ 0.2854, -0.2656, -0.2656,  0.9308,  0.2854,  0.0000]
])
P = G.inverse()
>> tensor([
    [11.9006, -1.5420, -1.5420,  3.1611, -8.0994, -3.3963],
    [-1.5420, 11.9006, -8.0994, -3.3963, -1.5420,  3.1611],
    [-1.5420, -8.0994, 11.9006, -3.3963, -1.5420,  3.1611],
    [ 3.1611, -3.3963, -3.3963,  6.9624,  3.1611, -6.4803],
    [-8.0994, -1.5420, -1.5420,  3.1611, 11.9006, -3.3963],
    [-3.3963,  3.1611,  3.1611, -6.4803, -3.3963,  6.9624]
])
lambda_ = 0.05
G += torch.eye(G.shape[0]) * lambda_
>> tensor([
    [1.0500, 0.0000, 0.0000, 0.0000, 1.0000, 1.0000],
    [0.0000, 1.0500, 1.0000, 1.0000, 0.0000, 0.0000],
    [0.0000, 1.0000, 1.0500, 1.0000, 0.0000, 0.0000],
    [0.0000, 1.0000, 1.0000, 2.0500, 0.0000, 1.0000],
    [1.0000, 0.0000, 0.0000, 0.0000, 1.0500, 1.0000],
    [1.0000, 0.0000, 0.0000, 1.0000, 1.0000, 2.0500]
])
G = sparse.to_dense().t() @ sparse.to_dense()
>> tensor([
    [1., 0., 0., 0., 1., 1.],
    [0., 1., 1., 1., 0., 0.],
    [0., 1., 1., 1., 0., 0.],
    [0., 1., 1., 2., 0., 1.],
    [1., 0., 0., 0., 1., 1.],
    [1., 0., 0., 1., 1., 2.]
])
franckjay / sparse.py
Created December 30, 2020 23:17
Build a Sparse Tensor
indices = torch.LongTensor(
    train[[user_id_col, item_id_col]].values
)
values = torch.ones(indices.shape[0])  # one implicit rating per interaction
sparse = torch.sparse.FloatTensor(indices.t(), values)
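Read from the bottom up (gists are listed newest first), the snippets above chain into a small closed-form item-item recommender (EASE-style). A self-contained sketch of the full pipeline; the toy DataFrame is made up, but it is chosen so the intermediate matrices match the outputs printed above, and torch.sparse_coo_tensor is the current spelling of the deprecated torch.sparse.FloatTensor constructor:

import pandas as pd
import torch

# Toy interactions (illustrative only): 3 users, 6 items
train = pd.DataFrame({"user_id": [0, 0, 0, 1, 1, 1, 2, 2],
                      "item_id": [0, 4, 5, 1, 2, 3, 3, 5]})
user_id_col, item_id_col = "user_id", "item_id"

# 1. Sparse user-item interaction matrix
indices = torch.LongTensor(train[[user_id_col, item_id_col]].values)
values = torch.ones(indices.shape[0])
sparse = torch.sparse_coo_tensor(indices.t(), values)

# 2. Item-item co-occurrence (Gram) matrix
X = sparse.to_dense()
G = X.t() @ X

# 3. Regularize the diagonal
lambda_ = 0.05
G += torch.eye(G.shape[0]) * lambda_

# 4. Invert, rescale by the negative diagonal, and zero the diagonal
P = G.inverse()
B = P / (-1 * P.diag())
B = B + torch.eye(B.shape[0])

# 5. Predictions for the first user
print(X[0] @ B)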
import React, { useState, useEffect } from 'react';
import { Image } from "semantic-ui-react";

export const GrabBook = () => {
  const [outputURL, setOutputURL] = useState("");
  useEffect(() => {
    fetch("/novel_novel").then(response =>
      response.json().then(data => {
        setOutputURL(data.image_url);
      })
    );
  }, []); // fetch once on mount
  // Render the fetched cover (assumed completion of the truncated gist)
  return <Image src={outputURL} />;
};
import React, { useState } from 'react';
import { Form, Input, Button } from 'semantic-ui-react';

export const BookEntry = () => {
  const [title, setTitle] = useState(''); // Empty String
  return (
    <Form>
      <Form.Field>
        <Input