0
0 комментариев

Добрый день!
Выдает ошибку. Помогите, пожалуйста, найти причину

<span class="ansi-cyan-intense-fg ansi-bold">File </span><span class="ansi-green-intense-fg ansi-bold">"<ipython-input-1-1576577dd68b>"</span><span class="ansi-cyan-intense-fg ansi-bold">, line </span><span class="ansi-green-intense-fg ansi-bold">28</span>
<span class="ansi-yellow-intense-fg ansi-bold">    For I in range(num_epochs):</span>
<span class="ansi-white-intense-fg ansi-bold">        ^</span>
<span class="ansi-red-intense-fg ansi-bold">SyntaxError</span><span class="ansi-red-intense-fg ansi-bold">:</span> invalid syntax
def sigmoid_derivative(x): #представление производной сигмовидной функции
    return sigmoid(x)*(1-sigmoid(x))
def loss(y_pred, y):
    y_pred=y_pred.reshape(-1,1)
    y =np.array(y).reshape(-1,1)
    return 0.5*np.mean((y_pred-y)**2)
class Neuron:
    def __init__(self, w=None, b=0) :
        self.w=w
        self.b=b
    def activate (self, x):
        return sigmoid(x)
    def forward_pass(self, X): 
        n=X.shape[0]
        y_pred=np.zeros((n, 1))
        y_pred=self.activate(X @ self.w+self.b)
        return y_pred.reshape(-1,1)
    def backward_pass(self, X, y, y_pred, learning_rate=0.01):
        n=len(y)
        y=np.array(y).reshape(-1,1)
        sigma=self.activate(X @ self.w+self.b)
        self.w=self.w-learning_rate*(X.T@((sigma-y)*sigma*(1-sigma)))/n
        self.b=self.b-learning_rate*np.mean((sigma-y)* sigma*(1-sigma))
    def fit(self, X, y, num_epochs=5000):
        self.w=np.zeros((X.shape[1],1))
        self.b=0
        Loss_values=[]
        For I in range(num_epochs):
            y_pred=self.forward_pass(X)
            Loss_values.append(Loss(y_pred, y))
            self.backward_pass(X, y, y_pred)
        return Loss_values
w=np.array([0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2]
b=0.0
X=np.array([[1, 0.75, 1, 0.5, 0, 0.5, 1, 1, 1],
            [0.6, 0.5, 0.75, 0, 0, 0, 0, 0.8, 1],
            [1, 1, 0.75, 0.5, 0.5, 0.5, 0.5, 1, 1],
            [0.6, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.6, 0.6],
            [0.6, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.6, 0.6],
            [1, 0.75, 0.75, 0, 0, 0.5, 0.5, 1, 1],
            [0.6, 0.5, 0.75, 0.5, 0, 0.5, 1, 1, 1],
            [0.6, 0.75, 0.5, 0, 0, 0, 0, 0.4, 0.4],
            [0.6, 0.75, 0.5, 0, 0, 0, 0, 1, 1],
            [0.6, 1, 0.25, 0, 0, 0.5, 0, 0.6,  0.8],
            [0.6, 0.75, 0.75, 0.5, 0.5, 0.5, 1, 1, 1],
            [0.6, 0.5, 0.25, 0, 0, 0, 0, 1, 1],
            [0.2, 0.5, 0.5, 0, 0, 0, 0, 0.2, 0.2],
            [0.6, 0.25, 0.75, 0, 0, 0, 0, 0.8, 0.4],
            [0.6, 1, 0.75, 0, 0, 1, 1, 1, 1],
            [0.6, 0.5,  0.5, 0, 0, 0.5, 0.5, 0.8, 0.8],
            [1, 1, 1, 0.5, 0, 0, 1, 0.2, 0.2],
            [0.8, 0.75, 0.75, 1, 1, 0.5, 0.5, 1, 1],
            [0.8, 0.75, 0.75, 0, 0, 0.5, 0.5, 0.8,  0.8],
            [1, 0.75, 0.75, 0, 0, 0.5, 0.5, 1, 1],
            [0.8, 0.75, 0.75,  0.5, 0.5, 0.5, 0.5, 1, 1],
            [0.6, 0.5, 0.75, 0, 0, 0.5, 0.5, 1, 1],
            [1, 1, 1, 0.5, 0.5, 1, 1, 1, 1],
            [0.8, 0.75, 1, 0.5, 0, 0.5, 1, 1, 1],
            [0.8, 0.25, 0.75, 0.5, 0, 0,  0.5, 1, 1],
            [0.6, 0.25, 0.5, 0, 0, 0.5, 0.5, 0.8, 1]])
y=np.array([1, 0.6, 1, 0.4, 0.4, 1, 0.4, 0.4, 0.8, 0.8, 0.6, 0.6, 0.2, 0.2, 1, 0.4, 1, 0.8, 0.6, 1, 0.8, 0.4, 1, 0.8, 0.4 0.8]).reshape(26,1)
neuron=Neuron(w, b)
y_pred=neuron.forward_pass(X)
neuron.backward_pass(X, y, y_pred)
print (“w_otn=” + str(((neuron.w-0.2)*1000))
Loss_values=neuron.fit(X,y)


Добавить комментарий