
Commit

fixed file-tree
Eduardo Leao authored and Eduardo Leao committed Mar 26, 2024
1 parent 7a23477 commit 43086a9
Showing 7 changed files with 205 additions and 32 deletions.
45 changes: 42 additions & 3 deletions assets/demo/demo.css
@@ -18,6 +18,7 @@ h1 {
 h2 {
 text-align: center;
 margin: auto;
+color: #0d0d0d;
 }
 
 .container {
@@ -39,7 +40,7 @@ h2 {
 max-width: 800px;
 background-color: #f4f4f4;
 padding: 20px;
-margin-top: 15px;
+margin-top: 11px;
 margin-left: 15px;
 border-radius: 10px;
 box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1);
@@ -68,7 +69,7 @@ h2 {
 
 #left-separator {
 width: 70%;
-margin-top: 15px;
+margin-top: 11px;
 margin-left: 15px;
 margin-bottom: 20px;
 align-self: bottom;
@@ -141,7 +142,7 @@ button:active {
 }
 
 .icon {
-width: 70%;
+width: 72%;
 margin: auto;
 }
 
@@ -165,3 +166,41 @@ button:active {
 #graph {
 width: 100%;
 }
+
+li{
+font-size: 15px;
+color: #464646;
+}
+
+span {
+font-size: 16px;
+font-weight: bold;
+color: #0d0d0d;
+}
+
+input {
+font-size: 14.5px;
+}
+
+#iter {
+margin-left: 50px;
+margin-top: 0px;
+font-size: 16px;
+font-weight: bold;
+color:#1b1b1b;
+}
+
+#total-visited {
+margin-left: 50px;
+margin-top: 0px;
+font-size: 16px;
+font-weight: bold;
+color:#1b1b1b;
+}
+
+#loss {
+margin-left: 50px;
+font-size: 16px;
+font-weight: bold;
+color:#1b1b1b;
+}
90 changes: 69 additions & 21 deletions assets/demo/demo.html
@@ -20,17 +20,29 @@
 <div style="height: 20px;"></div>
 <div class="container">
 <div class="separator" id="right-separator">
-<img class='icon' src="../../assets/icon_gray_bg.png">
+<img style="display: block;" class='icon' src="../../assets/icon_gray_bg.png">
 </div>
+
+<div class="separator" id="right-separator" style="margin-top: 10px;">
+<ul>
+<li><span>DATASET:</span> This model was trained on a <b>Dummy dataset</b>, composed of <b>randomly generated images</b>. </li>
+<br>
+<li><span>INPUT:</span> The batch size is <b>8</b>, and the images have a dimension of <b>32x32</b>. </li>
+<br>
+<li><span>OUTPUT:</span> The output is a number from 0 to 31.</li>
+<br>
+<li><span>TRAINING:</span> The Model is trained using an <b>Adam Optimizer</b> and a <b>Cross Entropy Loss</b>.</li>
+</ul>
+</div>
 </div>
 
 <div class="container" id="upper-container">
 <div class="left-div">
-<h2 style="display: inline-block;">Training Parameters</h2>
+<h2 style="display: inline-block; margin-left: 20px; margin-top: 2px; margin-bottom: 10px;">Training Parameters</h2>
 <div class="separator" id="left-separator">
 
-<!-- <label for="batch-size">Batch Size:</label>
-<input type="number" id="batch-size" name="batch-size" min="1" value="32" required> -->
+<label for="batch-size">Batch Size:</label>
+<input type="number" id="batch-size" name="batch-size" min="1" value="8" max="64" onchange="checkInput(this)">
 
 <label for="learning-rate">Learning Rate:</label>
 <input type="number" id="learning-rate" name="learning-rate" step="0.001" min="0" value="0.001" required>
@@ -51,7 +63,7 @@ <h2 style="display: inline-block;">Training Parameters</h2>
 </div>
 <div class="right-div">
 <div>
-<h2 style="display: inline-block; margin-right: 15px;">Model Layers</h2>
+<h2 style="display: inline-block; margin-right: 15px; margin-left: 20px;">Model Layers</h2>
 <button class='layer-button' onclick="addBox()">+</button>
 <button class='layer-button' onclick="removeBox()">-</button>
 <div class="separator" id="layersBox">
@@ -70,18 +82,19 @@ <h2 style="display: inline-block; margin-right: 15px;">Model Layers</h2>
 <h2>Graph</h2>
 <!-- This is where the graph will be displayed -->
 <canvas id="graph" width="700" height="350"></canvas>
-<div class="separator" style="width: 91.5%;">
-<p>
-This model was trained on a <b>Dummy dataset</b>, composed of <b>randomly generated images</b>. The batch size is <b>8</b>, and the images have a dimension of <b>32x32</b>. The output is a number from 0 to 31.
-</p>
-</div>
+<p id="iter"> Iteration: </p>
+<p id="total-visited"> Total Training Examples: </p>
+<p id="loss"> Loss: </p>
 </div>
 
 <script>
 let boxCount = [];
 let data = [];
 let training = false;
 let in_loop = true;
+let overFlow = 1;
+let iter = 0;
+let total_visited = 0;
 
 function addBox() {
 if (boxCount.length < 5 && !training) {
@@ -123,6 +136,25 @@ <h2>Graph</h2>
 boxCount[el.idx] = Number(el.value);
 };
 
+function checkInput(el){
+if (el.value > 64) {
+el.value = 64;
+};
+}
+
+function get_batch(x, y, batch_size) {
+// Instantiate x_batch and y_batch as empty tensors:
+let x_batch = [];
+let y_batch = [];
+// Iteratively add instances to batch:
+for (let i=0 ; i < batch_size ; i++) {
+p = Math.floor(Math.random() * x.length);
+x_batch.push(x.data[p])
+y_batch.push(y.data[p])
+};
+return [torch.tensor(x_batch), torch.tensor(y_batch)];
+};
+
 function trainLoopInitializer() {
 in_loop = true;
 trainLoop();
@@ -202,21 +234,26 @@ <h2>Graph</h2>
 
 };
 
-// Build optimizer with live learning rate and regularization values.
-lr = Number(document.getElementById('learning-rate').value)
-reg = Number(document.getElementById('regularization').value )
-beta1 = Number(document.getElementById('beta1').value )
-beta2 = Number(document.getElementById('beta2').value )
-eps = Number(document.getElementById('epsilon').value )
+// Get live learning rate and regularization values.
+let batch_size = Number(document.getElementById('batch-size').value)
+let lr = Number(document.getElementById('learning-rate').value)
+let reg = Number(document.getElementById('regularization').value )
+let beta1 = Number(document.getElementById('beta1').value )
+let beta2 = Number(document.getElementById('beta2').value )
+let eps = Number(document.getElementById('epsilon').value )
+
+// Build optimizer:
 let optimizer = new optim.Adam(model.parameters(), lr=lr, reg=reg, betas=[beta1, beta2], eps=eps)
 let loss;
 
 // Training Loop:
 for(let i=0 ; i < 1 ; i++) {
-let z = model.forward(x)
+let [x_batch, y_batch] = get_batch(x, y, batch_size)
+
+let z = model.forward(x_batch)
 
 // Get loss:
-loss = loss_func.forward(z, y)
+loss = loss_func.forward(z, y_batch)
 
 // Backpropagate the loss using neuralforge.tensor's backward() method:
 loss.backward()
@@ -227,10 +264,19 @@ <h2>Graph</h2>
 // Reset the gradients to zero after each training step:
 optimizer.zero_grad()
 
+// If loss went to infinity (model way too large for training size), represent that in the graph:
 if (isNaN(loss.data[0])) {
-data.push(12 + (Math.random() - 0.5) * 15)
-}
-else {data.push(loss.data)}
+overFlow = overFlow * 1.5;
+data.push(overFlow + (Math.random() - 0.5) * 15);
+// If not, just keep adding the loss to the graph:
+} else {data.push(loss.data)}
+
+// Display iteration and loss on the screen:
+document.getElementById('iter').innerHTML = `Iteration: ${iter}`;
+document.getElementById('total-visited').innerHTML = `Total Training Examples: ${total_visited}`;
+document.getElementById('loss').innerHTML = `Loss: ${loss.data[0].toFixed(3)}`;
+iter += 1;
+total_visited += batch_size;
 
 plotGraph()
 };
@@ -243,6 +289,8 @@ <h2>Graph</h2>
 function resetTraining() {
 in_loop = false;
 training = false;
+iter = 0;
+total_visited = 0;
 data = [];
 plotGraph();
 let boxCount = [];
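For reference, the script changes above amount to a batched forward/backward/step/zero_grad loop. Below is a minimal standalone sketch of that pattern under Node; the layer and loss constructor names (nn.Linear, nn.CrossEntropyLoss) and the tensor-factory signatures are assumptions not shown in this diff, while the get_batch helper, the Adam arguments, and the update sequence mirror the code above.

// Minimal sketch, assuming the package is installed as "js-pytorch" and
// exposes nn.Linear / nn.CrossEntropyLoss (names assumed, not in this diff).
const jstorch = require('js-pytorch');
const { nn, optim } = jstorch;

// Dummy data mirroring the demo: random 32x32 "images", labels 0 to 31.
const x = jstorch.randn([512, 32 * 32]);   // assumed: shape passed as an array
const y = jstorch.randint(0, 32, [512]);   // assumed signature

// Hypothetical single-layer model; the demo builds its layers from the UI boxes.
const model = new nn.Linear(32 * 32, 32);
const loss_func = new nn.CrossEntropyLoss();

// Same positional arguments the demo passes: parameters, lr, reg, betas, eps.
const optimizer = new optim.Adam(model.parameters(), 0.001, 0, [0.9, 0.99], 1e-9);

// Same sampling helper this commit adds to demo.html (mirrors its use of x.length and x.data):
function get_batch(x, y, batch_size) {
  const x_batch = [];
  const y_batch = [];
  for (let i = 0; i < batch_size; i++) {
    const p = Math.floor(Math.random() * x.length);
    x_batch.push(x.data[p]);
    y_batch.push(y.data[p]);
  }
  return [jstorch.tensor(x_batch), jstorch.tensor(y_batch)];
}

// Training loop: forward, loss, backward, step, zero_grad, as in the demo.
for (let iter = 0; iter < 100; iter++) {
  const [x_batch, y_batch] = get_batch(x, y, 8);
  const z = model.forward(x_batch);
  const loss = loss_func.forward(z, y_batch);
  loss.backward();
  optimizer.step();
  optimizer.zero_grad();
  console.log(`iter ${iter}: loss ${loss.data[0].toFixed(3)}`);
}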
23 changes: 23 additions & 0 deletions package.json
@@ -0,0 +1,23 @@
{
"name": "js-pytorch",
"version": "0.1.0",
"description": "A JavaScript library like PyTorch, built from scratch.",
"main": "index.js",
"directories": {
"test": "tests"
},
"scripts": {
"test": "node ./tests/test.js"
},
"keywords": ["deep-learning", "machine-learning", "PyTorch"],
"repository": {
"type": "git",
"url": "git+https://github.com/eduardoleao052/js-torch.git"
},
"author": "Eduardo Leitao da Cunha Opice Leao",
"license": "MIT",
"bugs": {
"url": "https://github.com/eduardoleao052/js-torch/issues"
},
"homepage": "https://github.com/eduardoleao052/js-torch#readme"
}
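For reference, the manifest above points "main" at index.js and wires npm test to a plain Node script, so the declared test entry can also be run directly. A minimal sketch, assuming a local checkout with Node installed (tests/test.js itself is not in this diff):

// npm test, per the "scripts" field above, simply runs ./tests/test.js with Node.
const { execSync } = require('child_process');
execSync('node ./tests/test.js', { stdio: 'inherit' });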
44 changes: 44 additions & 0 deletions src/index.js
@@ -0,0 +1,44 @@
const torch = require('./tensor.js');


var jstorch = {
// Add methods from tensor.js (these methods are accessed with "torch."):
Tensor: torch.Tensor,
Parameter: torch.Parameter,
add: torch.add,
neg: torch.neg,
mul: torch.mul,
div: torch.div,
matMul: torch.matMul,
exp: torch.exp,
log: torch.log,
sqrt: torch.sqrt,
pow: torch.pow,
mean: torch.mean,
masked_fill: torch.masked_fill,
variance: torch.variance,
at: torch.at,
reshape: torch.reshape,
_reshape: torch._reshape,
variance: torch.variance,
transpose: torch.transpose,
tensor: torch.tensor,
randint: torch.randint,
randn: torch.randn,
rand: torch.rand,
tril: torch.tril,
ones: torch.ones,
zeros: torch.zeros,
tensor: torch.tensor,
broadcast: torch.broadcast,
// Add submodules:
nn: require('./layers.js'),
optim: require('./optim.js')

};

if (typeof window !== 'undefined') {
window.jstorch = jstorch;
} else {
module.exports = jstorch;
};
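For reference, the new entry point above gathers the tensor functions under a single jstorch object, attaches it to window in a browser, and exports it via module.exports under Node. A minimal usage sketch, assuming src/index.js is required directly; the argument shapes passed to randn and ones are assumptions, since those signatures are not shown in this diff.

// Node usage of the entry point added above.
const jstorch = require('./src/index.js');

const a = jstorch.randn([2, 3]);   // assumed: shape given as an array
const b = jstorch.ones([3, 2]);
const c = jstorch.matMul(a, b);    // re-exported from tensor.js
console.log(c.data);               // tensors expose .data, as used in the demo

// Submodules re-exported by the same object:
const { nn, optim } = jstorch;     // layers.js and optim.js

// In a browser build, the same object is exposed as window.jstorch instead,
// per the typeof window check above.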
2 changes: 1 addition & 1 deletion src/layers.js
@@ -1,6 +1,6 @@
 if (typeof window === 'undefined'){
 globalThis.utils = require('./utils.js');
-globalThis.torch = require('./tensor.js')
+globalThis.torch = require('./tensor.js');
 };
 
 var nn = (function(exports){
1 change: 0 additions & 1 deletion src/tensor.js
@@ -1976,7 +1976,6 @@ var torch = (function(exports){
 // Add all functions to exports:
 exports.Tensor = Tensor;
 exports.Parameter = Parameter;
-
 exports.add = add;
 exports.neg = neg;
 exports.mul = mul;