In this notebook we will go through many features of FVGP. We will be primarily concerned with regression over a single-dimensional output and multiple tasks.

## This first cell has nothing to do with gpCAM, it’s just a function to plot some results later

```import plotly.graph_objects as go
import numpy as np
def plot(x,y,z,data = None):
fig = go.Figure()
fig.add_trace(go.Surface(x = x, y = y,z=z))
if data is not None:
mode='markers'))

fig.update_layout(title='Posterior Mean', autosize=True,
width=800, height=800,
margin=dict(l=65, r=50, b=65, t=90))

fig.show()
```

## Import fvgp and relevant libraries

```import fvgp
from fvgp import gp, fvgp
import numpy as np
import matplotlib.pyplot as plt
```

## Defining some input data and testing points

```def function(x):
data_1 = 100*np.sin(x)+np.cos(x)
data_2 = 5*np.ones(x.shape)
data_3 = 1*np.cos(x/10 + 5)
data_4 = 5*np.sin(x/200)
data_5 = 10*np.cos(x)

return np.column_stack((data_1, data_2, data_3, data_4, data_5))
```
```x_data = np.linspace(-2*np.pi, 10*np.pi,100).reshape(-1,1)  # 100 training inputs in [-2pi, 10pi], reshaped to a (100, 1) column
```
```y_data = function(x_data)  # (100, 5) observations: one column per task
```
```x_pred = np.linspace(3*np.pi, 4*np.pi, 100)  # 100 prediction points; NOTE(review): 1-D here, unlike the (100, 1) training inputs -- confirm fvgp accepts this, and that it is used later (unused in the cells shown)
```

## Setting up the fvgp multi task object

```obj = fvgp.fvGP(1,1,5,x_data,y_data,  # presumably (input dim=1, output dim=1, 5 tasks) -- TODO confirm against the fvGP signature
init_hyperparameters = np.array([10,10,10]))  # 3 starting hyperparameters for the default kernel
```

## Training our gaussian process regression on given data

```hyper_param_bounds = np.array([[0.0001, 1000],[ 0.0001, 1000],[ 0.0001, 1000]])  # one [low, high] search interval per hyperparameter
obj.train(hyper_param_bounds)  # global optimization of the log likelihood (differential evolution per the traceback below); can take a long time
```
```---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
Cell In, line 2
1 hyper_param_bounds = np.array([[0.0001, 1000],[ 0.0001, 1000],[ 0.0001, 1000]])
----> 2 obj.train(hyper_param_bounds)

304 if init_hyperparameters is None:
305     init_hyperparameters = np.array(self.hyperparameters)
--> 307 self.hyperparameters = self._optimize_log_likelihood(
308     init_hyperparameters,
309     np.array(hyperparameter_bounds),
310     method,
311     max_iter,
312     pop_size,
313     tolerance,
314     constraints,
315     local_optimizer,
316     global_optimizer,
319     )
320 self._compute_prior_fvGP_pdf()

477 logger.debug("termination tolerance: {}", tolerance)
478 logger.debug("bounds: {}", hp_bounds)
--> 479 res = differential_evolution(
480     self.log_likelihood,
481     hp_bounds,
482     maxiter=max_iter,
483     popsize = pop_size,
484     tol = tolerance,
485     constraints = constraints,
486     workers = 1,
487 )
488 hyperparameters = np.array(res["x"])
489 Eval = self.log_likelihood(hyperparameters)

File ~/checkouts/readthedocs.org/user_builds/fvgp/envs/latest/lib/python3.8/site-packages/scipy/optimize/_differentialevolution.py:397, in differential_evolution(func, bounds, args, strategy, maxiter, popsize, tol, mutation, recombination, seed, callback, disp, polish, init, atol, updating, workers, constraints, x0, integrality, vectorized)
380 # using a context manager means that any created Pool objects are
381 # cleared up.
382 with DifferentialEvolutionSolver(func, bounds, args=args,
383                                  strategy=strategy,
384                                  maxiter=maxiter,
(...)
395                                  integrality=integrality,
396                                  vectorized=vectorized) as solver:
--> 397     ret = solver.solve()
399 return ret

995 for nit in range(1, self.maxiter + 1):
996     # evolve the population by a generation
997     try:
--> 998         next(self)
999     except StopIteration:
1000         warning_flag = True

1383     feasible = True
1384     cv = np.atleast_2d([0.])
-> 1385     energy = self.func(parameters)
1386     self._nfev += 1
1388 # compare trial and population member

371 def __call__(self, x):
--> 372     return self.f(x, *self.args)

570 x,K = self._compute_covariance_value_product(hyperparameters,self.y_data, self.variances, mean)
571 y = self.y_data - mean
--> 572 sign, logdet = self.slogdet(K)
573 n = len(y)
574 #if sign == 0.0: return (0.5 * (y.T @ x)) + (0.5 * n * np.log(2.0*np.pi))
575 #return (0.5 * (y.T @ x)) + (0.5 * sign * logdet) + (0.5 * n * np.log(2.0*np.pi))

705 if self.compute_device == "cpu":
706     A = torch.from_numpy(A)
--> 707     sign, logdet = torch.slogdet(A)
708     sign = sign.numpy()
709     logdet = logdet.numpy()

KeyboardInterrupt:
```

## Looking at the posterior mean at the test points (remember that we did not define a particularly good kernel)

```task_idx = 1  # task to visualize; NOTE(review): not referenced in the cells shown here -- confirm it is used further down
```
```x_linspace = np.linspace(3*np.pi, 4*np.pi,100)  # prediction range along the input axis
y_linspace = np.linspace(0,4,100)  # NOTE(review): second axis presumably spans the 5 task indices 0..4 -- confirm
x_grid, y_grid = np.meshgrid(x_linspace, y_linspace)
posterior_mean = obj.posterior_mean(np.column_stack((x_grid.flatten(), y_grid.flatten())))  # posterior mean on the 100x100 (input, task) grid
```
```fig = plt.figure(figsize = (10,10))
```

    <mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x7f3a2804daf0>