Skip to content
Snippets Groups Projects
Commit 8b4a8c08 authored by Michele Nottoli's avatar Michele Nottoli
Browse files

Some old update.

parent 2d5d5431
No related branches found
No related tags found
No related merge requests found
Pipeline #2259 failed
...@@ -160,6 +160,14 @@ class LeastSquare(AbstractFitting): ...@@ -160,6 +160,14 @@ class LeastSquare(AbstractFitting):
result += vector * c result += vector * c
return result return result
def dummy_extrapolate(self, _):
    """Trivial extrapolation that ignores the descriptor argument.

    Builds a coefficient vector that is zero everywhere except for a
    weight of 1.0 on the most recent entry, then forms the weighted sum
    of the stored gammas — i.e. it reproduces the latest gamma as a
    fresh array of the same shape.
    """
    weights = np.zeros(len(self.A))
    weights[-1] = 1.0  # only the newest gamma contributes
    extrapolated = np.zeros(self.gammas[0].shape)
    for weight, gamma in zip(weights, self.gammas):
        extrapolated = extrapolated + gamma * weight
    return extrapolated
class QuasiTimeReversible(AbstractFitting): class QuasiTimeReversible(AbstractFitting):
"""Quasi time reversible fitting scheme.""" """Quasi time reversible fitting scheme."""
...@@ -167,7 +175,6 @@ class QuasiTimeReversible(AbstractFitting): ...@@ -167,7 +175,6 @@ class QuasiTimeReversible(AbstractFitting):
supported_options = { supported_options = {
"normalize": False, "normalize": False,
"regularization": 1e-4, "regularization": 1e-4,
"full_second_order": False
} }
def __init__(self, **kwargs): def __init__(self, **kwargs):
...@@ -248,6 +255,7 @@ class PolynomialRegression(AbstractFitting): ...@@ -248,6 +255,7 @@ class PolynomialRegression(AbstractFitting):
supported_options = { supported_options = {
"normalize": False, "normalize": False,
"regularization": 1e-4, "regularization": 1e-4,
"full_second_order": False,
"ref": -1, "ref": -1,
"order": 1} "order": 1}
...@@ -290,29 +298,31 @@ class PolynomialRegression(AbstractFitting): ...@@ -290,29 +298,31 @@ class PolynomialRegression(AbstractFitting):
gammas.append( gammas.append(
gamma_list[i].flatten() - self.gamma_ref) gamma_list[i].flatten() - self.gamma_ref)
d = np.array(descriptors, dtype=np.float64).T D = np.array(descriptors, dtype=np.float64).T
gammas = np.array(gammas, dtype=np.float64).T gammas = np.array(gammas, dtype=np.float64).T
else: else:
d = np.array(descriptor_list, dtype=np.float64).T D = np.array(descriptor_list, dtype=np.float64).T
gammas = np.reshape(gamma_list, gammas = np.reshape(gamma_list,
(len(gamma_list), self.gamma_shape[0]*self.gamma_shape[1])).T (len(gamma_list), self.gamma_shape[0]*self.gamma_shape[1])).T
norm = 1.0 norm = 1.0
if self.options["normalize"]: if self.options["normalize"]:
norm = np.linalg.norm(d) norm = np.linalg.norm(D)
if self.options["order"] >= 2: if self.options["order"] >= 2:
if self.options["full_second_order"]: if self.options["full_second_order"]:
outer = np.outer(d, d) O = np.empty((D.shape[0]**2, D.shape[1]))
d = np.vstack((d, outer.flatten())) for i in range(D.shape[1]):
O[:, i] = np.outer(D[:, i], D[:, i]).flatten()
D = np.vstack((D, O))
else: else:
d = np.vstack((d, d**2)) D = np.vstack((D, D**2))
A = d.T @ d A = D.T @ D
if self.options["regularization"] > 0.0: if self.options["regularization"] > 0.0:
A += np.identity(len(A))*self.options["regularization"]**2*norm**2 A += np.identity(len(A))*self.options["regularization"]**2*norm**2
self.matrix = d @ np.linalg.inv(A) @ gammas.T self.matrix = D @ np.linalg.solve(A, gammas.T)
def extrapolate(self, descriptor): def extrapolate(self, descriptor):
"""Apply the matrix to the current descriptor.""" """Apply the matrix to the current descriptor."""
...@@ -320,7 +330,13 @@ class PolynomialRegression(AbstractFitting): ...@@ -320,7 +330,13 @@ class PolynomialRegression(AbstractFitting):
descriptor -= self.descriptor_ref descriptor -= self.descriptor_ref
descriptor = np.array([descriptor]) descriptor = np.array([descriptor])
if self.options["order"] >= 2: if self.options["order"] >= 2:
descriptor = np.concatenate([descriptor, descriptor**2], axis=1) if self.options["full_second_order"]:
descriptor = np.concatenate(
[descriptor,
np.outer(descriptor,descriptor).flatten()],
axis=1)
else:
descriptor = np.concatenate([descriptor, descriptor**2], axis=1)
gamma = descriptor @ self.matrix gamma = descriptor @ self.matrix
if self.options["ref"]: if self.options["ref"]:
gamma += self.gamma_ref gamma += self.gamma_ref
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment