import math
import random
import copy as cp
from sys import argv

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import offsetbox
from mpl_toolkits.mplot3d import Axes3D

from sklearn import (datasets, svm, metrics, manifold, decomposition,
                     discriminant_analysis, linear_model)
from sklearn.datasets import load_digits
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import (KFold, cross_val_predict, cross_val_score,
                                     cross_validate, train_test_split)
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
# Linear trajectory model: returns (x, y, z) at time t, given slopes b and intercepts c.
def linear(t, b, c):
    return b[0]*t + c[0], b[1]*t + c[1], b[2]*t + c[2]

# Quadratic ("polinomial") trajectory model: returns (x, y, z) at time t.
def polinomial(t, a, b, c):
    return (a[0]*(t**2) + b[0]*t + c[0],
            a[1]*(t**2) + b[1]*t + c[1],
            a[2]*(t**2) + b[2]*t + c[2])
# Takes two matrices, one with the targets and one with the model outputs,
# and returns the mean squared error (halved) summed over the three coordinates.
def mse(P1, P2):
    n = len(P1[0])
    d = [(P1[i] - P2[i])**2 for i in [0, 1, 2]]
    return sum([sum(d[i]) for i in [0, 1, 2]]) / (2*float(n))
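# A minimal sanity check for mse (illustrative only; the 3x2 arrays below are
# made-up values, not project data):
# >>> P_target = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]])
# >>> P_model  = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 0.0]])
# >>> mse(P_target, P_model)  # only z differs, by 1 at one sample -> 1/(2*2) = 0.25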
def plot_linear(dataset, b, c, angle1, angle, multiplicadorFrame=1, limite=1000):
    # Observed ball positions.
    x = dataset.iloc[:, 1]
    y = dataset.iloc[:, 2]
    z = dataset.iloc[:, 3]
    fig = plt.figure(figsize=(4, 4))
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(x, y, z)
    framerate = 1/60
    t = []
    X_pred = []
    Y_pred = []
    Z_pred = []
    acc = multiplicadorFrame * framerate
    # Roll the fitted model forward in time until the ball reaches the ground
    # (z < 0) or the iteration limit is hit.
    while True:
        acc += framerate
        t.append(acc)
        predictions = np.array(linear(acc, b, c))
        X_pred.append(predictions[0])
        Y_pred.append(predictions[1])
        Z_pred.append(predictions[2])
        if predictions[2] < 0 or len(t) > limite:
            break
    # Ground plane and field/goal outlines.
    X, Y = np.meshgrid(np.linspace(-3, 3, 2), np.linspace(0, 2, 2))
    Z = np.zeros(X.shape)
    ax.plot_surface(X, Y, Z, shade=False, color='g', alpha=.4)
    ax.plot([-3, -3, 3, 3], [0, 1, 1, 0], zdir='z', zs=0, c='g')
    ax.plot([-3, -3, 3, 3], [0, 0.3, 0.3, 0], zdir='y', zs=0, c='black')
    # Projections of the predicted trajectory onto the three planes.
    ax.plot(X_pred, Z_pred, zdir='y', zs=2, alpha=.25, c='k')
    ax.plot(Y_pred, Z_pred, zdir='x', zs=-3, alpha=.25, c='k')
    ax.plot(X_pred, Y_pred, zdir='z', zs=0, alpha=.25, c='k')
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    ax.set_xlim(-3, 3)
    ax.set_ylim(0, 2.1)
    ax.set_zlim(0, 0.5)
    # Predicted trajectory in 3D.
    ax.plot(X_pred, Y_pred, Z_pred, color='red')
    ax.view_init(angle1, angle)
    plt.title("Linear")
    plt.show()
def plot_polinomial(dataset, a, b, c, angle, multiplicadorFrame=1, limite=1000):
    # Observed ball positions.
    x = dataset.iloc[:, 1]
    y = dataset.iloc[:, 2]
    z = dataset.iloc[:, 3]
    fig = plt.figure(figsize=(4, 4))
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(x, y, z)
    framerate = 1/60
    t = []
    X_pred = []
    Y_pred = []
    Z_pred = []
    acc = framerate * multiplicadorFrame
    # Roll the model forward until the ball reaches the ground (z < 0) or the
    # iteration limit is hit. Only Z carries the quadratic term, since a[0]
    # and a[1] are never trained; X and Y are evaluated with the linear part.
    while True:
        acc += framerate
        t.append(acc)
        predictions = np.array(polinomial(acc, a, b, c))
        X_pred.append(b[0]*acc + c[0])
        Y_pred.append(b[1]*acc + c[1])
        Z_pred.append(a[2]*(acc**2) + b[2]*acc + c[2])
        if predictions[2] < 0 or len(t) > limite:
            break
    # Ground plane and field/goal outlines.
    X, Y = np.meshgrid(np.linspace(-3, 3, 2), np.linspace(0, 2, 2))
    Z = np.zeros(X.shape)
    ax.plot_surface(X, Y, Z, shade=False, color='g', alpha=.4)
    ax.plot([-3, -3, 3, 3], [0, 1, 1, 0], zdir='z', zs=0, c='g')
    ax.plot([-3, -3, 3, 3], [0, 0.3, 0.3, 0], zdir='y', zs=0, c='black')
    # Projections of the predicted trajectory onto the three planes.
    ax.plot(X_pred, Z_pred, zdir='y', zs=2, alpha=.25, c='k')
    ax.plot(Y_pred, Z_pred, zdir='x', zs=-3, alpha=.25, c='k')
    ax.plot(X_pred, Y_pred, zdir='z', zs=0, alpha=.25, c='k')
    ax.set_xlim(-3, 3)
    ax.set_ylim(0, 2.1)
    ax.set_zlim(0, 0.5)
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    # Predicted trajectory in 3D.
    ax.plot(X_pred, Y_pred, Z_pred, color='red')
    ax.view_init(10, angle)
    plt.title("Polynomial")
    plt.show()
# Side-by-side bar chart comparing the final errors of the two models.
def plot_bar(title, errors):
    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1])
    ax.set_ylim(0, 0.01)
    labels = ['linear', 'polynomial']
    ax.bar(labels, errors)
    ax.bar_label(ax.containers[0], label_type='edge')
    plt.title(title)
    plt.show()
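# Hypothetical call, just to show the expected shape of the arguments (the
# error values below are made up):
# plot_bar("Final MSE per model", [0.004, 0.002])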
def batch_linear(dataset, epochs=1000, learning_rate=0.0001):
    t = dataset.iloc[:, 0]
    X = dataset.iloc[:, 1]
    Y = dataset.iloc[:, 2]
    Z = dataset.iloc[:, 3]
    b = np.zeros(3)
    # Initialize the intercepts with the first data point (copied so the
    # in-place updates below cannot touch the original DataFrame).
    c = dataset.values[0][1:].astype(float).copy()
    results = []
    iterations = []
    fig, ax = plt.subplots()
    ax.grid(True, which='both')
    ax.axhline(y=0, color='k')
    ax.set_ylabel('Errors')
    ax.set_xlabel('Epoch')
    n = float(len(X))
    for i in range(epochs):
        X_pred, Y_pred, Z_pred = linear(t, b, c)
        targets = np.array([X, Y, Z])
        predictions = np.array([X_pred, Y_pred, Z_pred])
        error = mse(targets, predictions)  # sum of the errors for X, Y and Z
        results.append(error)
        iterations.append(i)
        # Partial derivatives: distances between targets and predictions
        d = [(targets[j] - predictions[j]) for j in [0, 1, 2]]
        # ∂mse/∂b
        b_deriv = [-(sum(d[j]*t))/n for j in [0, 1, 2]]
        # ∂mse/∂c
        c_deriv = [-sum(d[j])/n for j in [0, 1, 2]]
        # We subtract because the derivatives point in the direction of steepest ascent.
        b -= np.array(b_deriv) * learning_rate
        c -= np.array(c_deriv) * learning_rate
    plt.plot(iterations, results)
    plt.show()
    print("Final error: " + str(error))
    return b, c, error
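# Hypothetical usage sketch (the real training calls come later in the
# notebook; the epochs and learning_rate values here are illustrative, not tuned):
# b1, c1, err_lin = batch_linear(data_kick1, epochs=1000, learning_rate=0.0001)
# plot_linear(data_kick1, b1, c1, 10, 45)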
def batch_polinomial(dataset, epochs=1000, learning_rate=0.0001):
    t = dataset.iloc[:, 0]
    X = dataset.iloc[:, 1]
    Y = dataset.iloc[:, 2]
    Z = dataset.iloc[:, 3]
    a = np.zeros(3)
    b = np.zeros(3)
    # Initialize the intercepts with the first data point (copied, as in batch_linear).
    c = dataset.values[0][1:].astype(float).copy()
    results = []
    iterations = []
    fig, ax = plt.subplots()
    ax.grid(True, which='both')
    ax.axhline(y=0, color='k')
    ax.set_ylabel('Errors')
    ax.set_xlabel('Epoch')
    n = float(len(X))
    for i in range(epochs):
        X_pred, Y_pred, Z_pred = polinomial(t, a, b, c)
        targets = np.array([X, Y, Z])
        predictions = np.array([X_pred, Y_pred, Z_pred])
        error = mse(targets, predictions)  # sum of the errors for X, Y and Z
        results.append(error)
        iterations.append(i)
        # Partial derivatives: distances between targets and predictions
        d = [(targets[j] - predictions[j]) for j in [0, 1, 2]]
        # ∂mse/∂a — only the Z component gets a quadratic (gravity) term;
        # a[0] and a[1] are kept at zero, so X and Y stay linear in t.
        a_deriv = [0, 0, -(sum(d[2]*(t**2)))/n]
        # ∂mse/∂b
        b_deriv = [-(sum(d[j]*t))/n for j in [0, 1, 2]]
        # ∂mse/∂c
        c_deriv = [-sum(d[j])/n for j in [0, 1, 2]]
        # We subtract because the derivatives point in the direction of steepest ascent.
        a[2] -= a_deriv[2] * learning_rate
        b -= np.array(b_deriv) * learning_rate
        c -= np.array(c_deriv) * learning_rate
    plt.plot(iterations, results)
    plt.show()
    print("Final error: " + str(error))
    return a, b, c, error
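# Hypothetical usage sketch, mirroring the linear case (values illustrative):
# a1, b1, c1, err_pol = batch_polinomial(data_kick1, epochs=1000, learning_rate=0.0001)
# plot_polinomial(data_kick1, a1, b1, c1, 45)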
# Picks, from the results of sklearn's cross_validate (called with
# return_estimator=True), the fitted estimator with the highest test score.
def best_estimator(estimators):
    index_max_score = np.argmax(estimators['test_score'])
    return estimators['estimator'][index_max_score]
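# Sketch of how best_estimator is meant to be fed (X_digits/y_digits are
# placeholder arrays; return_estimator=True is what puts the 'estimator' key
# into the cross_validate results):
# cv_results = cross_validate(SVC(kernel='linear'), X_digits, y_digits, cv=5,
#                             return_estimator=True)
# clf = best_estimator(cv_results)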
# KICK 1
kick1_path = "/work/data/kick1.dat"
data_kick1 = pd.read_csv(kick1_path, sep=" ", header=None)
framerate = 1/5
# Build the time axis: one sample every 1/5 s, starting at t = framerate.
t = []
acc = 0
for i in range(len(data_kick1[1])):
    acc += framerate
    t.append(acc)
data_kick1.insert(loc=0, column='t', value=t)
# KICK 2
kick2_path = "/work/data/kick2.dat"
data_kick2 = pd.read_csv(kick2_path, sep=" ", header=None)
framerate = 1/5
# Same time axis construction as for kick 1.
t = []
acc = 0
for i in range(len(data_kick2[1])):
    acc += framerate
    t.append(acc)
data_kick2.insert(loc=0, column='t', value=t)
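# Both frames now hold four columns: 't' followed by the three original
# position columns, read throughout as X, Y and Z (dataset.iloc[:, 1:4]).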
epochs_l1