

def predict(X, w, b): # X Input. # Calculating predictions/y_hat. preds = sigmoid(np.dot(X, w) + b) # Empty list to s


def predict(X, w, b):

    # X Input.

    # Calculating predictions/y_hat.
    preds = sigmoid(np.dot(X, w) + b)

    # Empty list to store predictions.
    pred_class = []

    # Delete the following two lines and replace them with your own.
    for i in preds:
        pred_class.append(0)

    # if y_hat >= 0.5 round up to 1
    # if y_hat < 0.5 round down to 0

    ###
    ### YOUR CODE HERE
    ###

    return np.array(pred_class)
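
One way to fill in the missing thresholding step, offered as a sketch rather than the only acceptable answer; it assumes the sigmoid helper from Step 7 below is already in scope:

import numpy as np

def predict(X, w, b):
    # X Input.

    # Calculating predictions/y_hat.
    preds = sigmoid(np.dot(X, w) + b)

    # Empty list to store predictions.
    pred_class = []

    # if y_hat >= 0.5 round up to 1, if y_hat < 0.5 round down to 0.
    for y_hat in preds:
        if y_hat >= 0.5:
            pred_class.append(1)
        else:
            pred_class.append(0)

    return np.array(pred_class)

An equivalent vectorized alternative is pred_class = (preds >= 0.5).astype(int).ravel(), which avoids the explicit loop.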
Step 5 Extract only the x data

def extract_only_x_data(dataset):
    if len(dataset) == 0:
        return
    data = list()
    for i in range(0, len(dataset)):
        data.append(list())
        for j in range(0, len(dataset[i]) - 1):
            data[-1].append(float(dataset[i][j]))
    return data

Step 6 Extract only the y data

def extract_only_y_data(dataset):
    if len(dataset) == 0:
        return
    data = list()
    for i in range(0, len(dataset)):
        data.append(int(dataset[i][-1]))
    return data
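
A small usage sketch of these two helpers, under the assumption that the dataset is a list of rows whose last column is the 0/1 label; the row values below are invented purely for illustration:

# Hypothetical rows: two feature columns followed by a 0/1 label column.
rows = [["5.1", "3.5", "0"],
        ["6.2", "2.9", "1"]]

X_data = extract_only_x_data(rows)   # [[5.1, 3.5], [6.2, 2.9]]
y_data = extract_only_y_data(rows)   # [0, 1]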

Step 7 Define sigmoid function

def sigmoid(z):
    z = 1/(1 + np.exp(-z))
    # Return the value of the implemented sigmoid function, do not simply return z
    return z

Step 8 Define loss function

def loss(y, y_hat):
    loss = -np.mean(y*(np.log(y_hat)) + (1-y)*np.log(1-y_hat))
    return loss

Step 9 Define gradients function

def gradients(X, y, y_hat):
    # X Input.
    # y true/target value.
    # y_hat predictions.
    # w weights.
    # b bias.

    # Number of training examples.
    number_of_examples = X.shape[0]

    # Gradient of loss w.r.t. weights.
    dw = (1/number_of_examples)*np.dot(X.T, (y_hat - y))

    # Gradient of loss w.r.t. bias.
    db = (1/number_of_examples)*np.sum((y_hat - y))

    return dw, db
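
A quick sanity check of the three helpers from Steps 7-9 on a tiny made-up batch; every number below is an illustrative assumption, not part of the assignment:

import numpy as np

X = np.array([[0.0, 1.0],
              [2.0, 3.0]])          # 2 examples, 2 features
y = np.array([[0], [1]])            # labels as a column vector
w = np.zeros((2, 1))                # zero weights
b = 0

y_hat = sigmoid(np.dot(X, w) + b)   # every probability is 0.5 with zero weights
print(loss(y, y_hat))               # about 0.6931, i.e. ln(2)
dw, db = gradients(X, y, y_hat)
print(dw.shape, db)                 # (2, 1) and 0.0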

Step 10 Train the dataset

def train(X, y, batch_size, epochs, learning_rate):
    # X Input.
    # y true/target value.
    # batch_size Batch size.
    # epochs Number of iterations.
    # learning_rate Learning rate.

    # Number of training examples and number of features.
    number_of_examples, number_of_features = X.shape

    # Initializing weights and bias to zeros.
    weights = np.zeros((number_of_features, 1))
    bias = 0

    # Reshaping y.
    y = y.reshape(number_of_examples, 1)

    # Empty list to store losses.
    losses = []

    # Training loop.
    for epoch in range(epochs):
        for i in range((number_of_examples - 1)//batch_size + 1):
            # Defining batches (SGD).
            start_i = i * batch_size
            end_i = start_i + batch_size
            xb = X[start_i:end_i]
            yb = y[start_i:end_i]

            # Calculating hypothesis/prediction.
            y_hat = sigmoid(np.dot(xb, weights) + bias)

            # Getting the gradients of loss w.r.t. parameters.
            dw, db = gradients(xb, yb, y_hat)

            # Updating the parameters.
            weights = weights - learning_rate * dw
            bias = bias - learning_rate * db

        # Calculating loss and appending it to the list.
        l = loss(y, sigmoid(np.dot(X, weights) + bias))
        losses.append(l)

    # Returning weights, bias and losses (list).
    return weights, bias, losses
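
To see the whole pipeline in action, here is an end-to-end sketch on synthetic data; the dataset, batch size, epoch count and learning rate are illustrative assumptions, and it relies on the completed predict shown earlier in this post:

import numpy as np

np.random.seed(0)

# Synthetic, linearly separable data: label 1 when the feature sum is positive.
X = np.random.randn(100, 2)
y = (X[:, 0] + X[:, 1] > 0).astype(int)

weights, bias, losses = train(X, y, batch_size=10, epochs=100, learning_rate=0.1)

pred_class = predict(X, weights, bias)
print("final loss:", losses[-1])
print("training accuracy:", np.mean(pred_class == y))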

Make the prediction: complete the predict(X, w, b) skeleton shown at the top of this post, rounding y_hat >= 0.5 up to 1 and y_hat < 0.5 down to 0 (see the completed sketch after that skeleton).