import numpy as np
import tensorflow as tf
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

TensorFlow

# 8 samples with 4 features each
x_data = np.array([[1, 2, 1, 1],[2, 1, 3, 2],[3, 1, 3, 4],[4, 1, 5, 5],[1, 7, 5, 5],[1, 2, 5, 6],[1, 6, 6, 6],[1, 7, 7, 7]], dtype=np.float32)
# one-hot labels over 3 classes; tf.matmul consumes the float32 NumPy arrays
# directly, so no torch.FloatTensor conversion is needed (or valid) here
y_data = np.array([[0, 0, 1],[0, 0, 1],[0, 0, 1],[0, 1, 0],[0, 1, 0],[0, 1, 0],[1, 0, 0],[1, 0, 0]], dtype=np.float32)

# weight: 4 input features -> 3 classes; bias: one per class
w = tf.Variable(tf.random.normal([4, 3]))
b = tf.Variable(tf.random.normal([3]))

learning_rate = 0.001

for i in range(50001):
    with tf.GradientTape() as tape:
        # forward pass: softmax hypothesis and cross-entropy cost, recorded on the tape
        hypothesis = tf.nn.softmax(tf.matmul(x_data, w) + b)
        cost = tf.reduce_mean(-tf.reduce_sum(y_data * tf.math.log(hypothesis), axis=1))
    # compute gradients outside the tape context, then apply a plain SGD update
    w_grad, b_grad = tape.gradient(cost, [w, b])
    w.assign_sub(learning_rate * w_grad)
    b.assign_sub(learning_rate * b_grad)
    if i % 500 == 0:
        print(f'Step : {i}, Cost : {cost}, Weight : {w.numpy()[0]}, Bias : {b.numpy()[0]}')
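The hand-rolled -sum(y * log(softmax)) cost can blow up to inf when a softmax output underflows to zero. For reference, here is a minimal sketch of a numerically safer cost, assuming the same x_data, y_data, w, and b as above, using the built-in tf.nn.softmax_cross_entropy_with_logits, which works on raw logits:

with tf.GradientTape() as tape:
    logits = tf.matmul(x_data, w) + b
    # no explicit softmax or log: the fused op handles both in a stable way
    cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_data, logits=logits))
w_grad, b_grad = tape.gradient(cost, [w, b])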

PyTorch

x_data = np.array([[1, 2, 1, 1],[2, 1, 3, 2],[3, 1, 3, 4],[4, 1, 5, 5],[1, 7, 5, 5],[1, 2, 5, 6],[1, 6, 6, 6],[1, 7, 7, 7]], dtype=np.float32)
# y_data = np.array([[0, 0, 1],[0, 0, 1],[0, 0, 1],[0, 1, 0],[0, 1, 0],[0, 1, 0],[1, 0, 0],[1, 0, 0]], dtype=np.float32)
# F.cross_entropy expects class indices (LongTensor), not one-hot vectors
y_data = np.array([2, 2, 2, 1, 1, 1, 0, 0])
x_data = torch.FloatTensor(x_data)
y_data = torch.LongTensor(y_data)
w = torch.zeros((4, 3), requires_grad=True)
b = torch.zeros(3, requires_grad=True)  # one bias per class (torch.zeros(1) would share a single bias across all three)
learning_rate = 0.001

optimizer = optim.SGD([w, b], lr=learning_rate)

for i in range(50001):
    z = x_data.matmul(w) + b           # raw logits
    cost = F.cross_entropy(z, y_data)  # log_softmax + NLL in a single call
    optimizer.zero_grad()
    cost.backward()
    optimizer.step()
    if i % 500 == 0:
        print(f'Step : {i}, Cost : {cost.item()}')
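For reference, F.cross_entropy fuses log_softmax and nll_loss into a single call, which is why y_data holds class indices rather than one-hot vectors; the line below computes the same value as the cost above:

cost = F.nll_loss(F.log_softmax(z, dim=1), y_data)  # identical to F.cross_entropy(z, y_data)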

PyTorch (with nn.Module)

class SoftmaxClassifierModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(4, 3)  # 4 input features -> 3 class logits

    def forward(self, x):
        # return raw logits; F.cross_entropy applies log_softmax internally
        return self.linear(x)

x_data = np.array([[1, 2, 1, 1],[2, 1, 3, 2],[3, 1, 3, 4],[4, 1, 5, 5],[1, 7, 5, 5],[1, 2, 5, 6],[1, 6, 6, 6],[1, 7, 7, 7]], dtype=np.float32)
# y_data = np.array([[0, 0, 1],[0, 0, 1],[0, 0, 1],[0, 1, 0],[0, 1, 0],[0, 1, 0],[1, 0, 0],[1, 0, 0]], dtype=np.float32)
# class-index labels again, as required by F.cross_entropy
y_data = np.array([2, 2, 2, 1, 1, 1, 0, 0])
x_data = torch.FloatTensor(x_data)
y_data = torch.LongTensor(y_data)

model = SoftmaxClassifierModel()
learning_rate = 0.001
optimizer = optim.SGD(model.parameters(), lr=learning_rate)

for i in range(50001):
    prediction = model(x_data)                  # logits from the linear layer
    cost = F.cross_entropy(prediction, y_data)
    optimizer.zero_grad()
    cost.backward()
    optimizer.step()
    if i % 500 == 0:
        print(f'Step : {i}, Cost : {cost.item()}')
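Once training finishes, the predicted class is the index of the largest logit. A quick sanity check (a minimal sketch reusing the trained model and data from above):

with torch.no_grad():
    predicted = model(x_data).argmax(dim=1)
    accuracy = (predicted == y_data).float().mean()
    print(f'Predicted : {predicted.tolist()}, Accuracy : {accuracy.item():.2f}')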

 
