# python小作业|notebook|小作业代写|简单作业

Jupyter Server: local — Python 3 (idle)

```python
import numpy as np
```

# X

```python
X = np.array([[1, 1, 4, 3],
              [1, 2, 3, 7],
              [1, 2, 4, 5],
              [1, 4, 9, 6],
              [1, 5, 6, 3]], float)
print(X)
```

```
[[1. 1. 4. 3.]
 [1. 2. 3. 7.]
 [1. 2. 4. 5.]
 [1. 4. 9. 6.]
 [1. 5. 6. 3.]]
```

Note that X is an array containing 5 feature vectors, each of dimension 3 and augmented with a leading 1 (the bias term), giving 4 columns.

```python
X.shape
```

`(5, 4)`

# Pseudoinverse

```python
Xpinv = np.linalg.pinv(X)
print(np.round(Xpinv, decimals=3))
```

```
[[ 1.335 -0.194  0.422 -1.023  0.46 ]
 [-0.269  0.098 -0.037 -0.106  0.314]
 [ 0.081 -0.146 -0.035  0.218 -0.118]
 [-0.167  0.183  0.013  0.08  -0.109]]
```

```python
Xpinv.shape
```

`(4, 5)`

# Check

```python
print(np.round(np.dot(Xpinv, X), decimals=3))
```

```
[[ 1.  0.  0.  0.]
 [ 0.  1.  0.  0.]
 [-0. -0.  1. -0.]
 [-0. -0. -0.  1.]]
```

```python
print(np.round(np.dot(X, Xpinv), decimals=3))
```

```
[[ 0.888 -0.131  0.285 -0.015 -0.027]
 [-0.131  0.847  0.334 -0.018 -0.032]
 [ 0.285  0.334  0.274  0.039  0.069]
 [-0.015 -0.018  0.039  0.998 -0.004]
 [-0.027 -0.032  0.069 -0.004  0.993]]
```

Note that `Xpinv @ X` is exactly the 4×4 identity, so `Xpinv` is a left inverse of X. In contrast, `X @ Xpinv` (5×5) cannot equal the identity because X has more rows than columns; although this matrix differs significantly from the identity, it is the orthogonal projection onto the column space of X — the best approximation to the identity in the least-squares sense.