{ "metadata": { "name": "", "signature": "sha256:0b49de17f9ced4d8ade12c5ca09589bc3d338b9b7abf3696ca1891d4369fff9b" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "code", "collapsed": false, "input": [ "import numpy as np\n", "import pylab as pb\n", "import GPy\n", "import scipy.io\n", "import matplotlib.pyplot as plt\n", "%matplotlib inline" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "heading", "level": 6, "metadata": {}, "source": [ "Readind Y is the gene expression(n,t) n= 45038, t= 64, and T matrix is time series (t,)t=64, and S is connectivity matrix between gene expression and TF (n,q) n=45038 , q=69.\n" ] }, { "cell_type": "code", "collapsed": false, "input": [ "Y=np.load(\"/users/suraalrashid/expression.npy\")\n", "t = np.asarray([30,30,30,30,60,60,60,60,90,90,90,90,120,120,120,120,30,30,30,30,60,60,60,60,90,90,90,90,120,120,120,120,30,30,30,30,60,60,60,60,90,90,90,90,120,120,120,120,30,30,30,30,60,60,60,60,90,90,90,90,120,120,120,120])\n", "t = t[:, None]\n", "Y=Y[0:45038,:]\n", "Y=Y[0:100,:]\n", "\n", "\n", "\n", "def con_mat_True_False():\n", " X=np.load(\"/users/suraalrashid/connectivity matrix.npy\")\n", " transcription_factors = X[0,1:70]\n", " annotations = X[1:45039,0]\n", " #X = X[transcription_factors]\n", " \n", " X=X[1:45039,1:70]\n", " return (annotations, X, transcription_factors)\n", "\n", "\n", "annotations,X,transcription_Factors= con_mat_True_False()\n", "# set S to find relationships where p-value is less than 1e-3\n", "#S = data['X'].T<1e-3\n", "S=np.repeat(0,45038*69).reshape(45038,69)\n", "for i in range(45038):\n", " for j in range(69):\n", " S[i,j]=1 if X[i,j] =='1' else 0\n", "S=S[0:100,0:7]\n", "TF=transcription_Factors[0:7]" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 2 }, { "cell_type": "heading", "level": 6, "metadata": {}, "source": [ "Compute SVD processing R is (n,n) , Lambda1(q), make Lambda(q,q) from Lambda1, V is (q,q) " ] }, { "cell_type": "code", "collapsed": false, "input": [ "# step 1, find the SVD of S.\n", "n, q = S.shape\n", "T = Y.shape[1]\n", "\n", "R, Lambda1, V = np.linalg.svd(S)\n", "# Extract first q columns for Q\n", "Q = R[:, :q]\n", "# remaining columns for U\n", "U = R[:, q:]\n", "print n,q,T\n", "Lambda=np.zeros(q*q).reshape(q,q)\n", "for i in range(q):\n", " Lambda[i,i]=Lambda1[i]\n" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "100 7 64\n" ] } ], "prompt_number": 3 }, { "cell_type": "heading", "level": 6, "metadata": {}, "source": [ "To compute sigma2 must be compute Y_u fron U matrix " ] }, { "cell_type": "code", "collapsed": false, "input": [ "# Find sigma2 by looking at variance of y_u\n", "Y_u = np.dot(U.T, Y)\n", "sigma2 = 1./(T*(n-q))*(Y_u*Y_u).sum()\n", "print \"sigma2 found as\", sigma2\n" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "sigma2 found as 0.245147250992\n" ] } ], "prompt_number": 4 }, { "cell_type": "markdown", "metadata": {}, "source": [ "To getting X and Y, X make from 3 matrix the first matrix is t (time series), the second matrix for coregionalization for 2 strains and 2 mutations, and the third matrix for coregionalization for transcription factors (q), X is (t*q,3 ), \n", "\n", "\n", "\n", "\n", "and Y is made from Q where it's shape is (q*t,) " ] }, { "cell_type": "code", "collapsed": false, "input": [ "# Prepare the data for processing in GPy\n", "Y_q = np.dot(Q.T, Y) # project data onto the principal subspace of X \n", "\n", "# Generate the input associated with each Y, the TF and the time point.\n", "s_m=np.asarray([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n", " 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n", " 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,\n", " 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3])\n", "x0, x1= np.asarray( np.meshgrid( t.flatten(),np.arange(q) ) )\n", "x2,_= np.asarray( np.meshgrid( s_m.flatten(),np.arange(q) ) )\n", "#print x0.shape,x1.shape, x2.shape\n", "#print x0,x1,x2\n", "Xt=np.hstack([t.flatten()[:, None],s_m.flatten()[:,None]])\n", "#np.save(\"/home/sura/GPy/TF/Xt.npy\",Xt)\n", "\n", "X = np.hstack([x0.flatten()[:, None], x2.flatten()[:, None],x1.flatten()[:,None]])\n", "#np.save(\"/home/sura/GPy/TF/Xt.npy\",X)\n", "np.save(\"Xt.npy\",X)\n", "\n", "#print X.shape, X\n", "y = Y_q.flatten()[:, None]\n", "#print Y_q.flatten()[:, None].shape" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 5 }, { "cell_type": "heading", "level": 6, "metadata": {}, "source": [ "Saveing the V , Lambda and Q to compute the mean and cov. later" ] }, { "cell_type": "code", "collapsed": false, "input": [ "np.save(\"V.npy\", V)\n", "np.save(\"Lambda.npy\", Lambda)\n", "np.save(\"Q.npy\", Q)" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 6 }, { "cell_type": "code", "collapsed": false, "input": [ "kern = GPy.kern.RBF(1, active_dims=[0])*GPy.kern.Coregionalize(1,4, rank=4,active_dims=[1])*GPy.kern.Coregionalize(1,q, rank=q,active_dims=[2])\n", "#kern = GPy.kern.RBF(1, active_dims=[0])*GPy.kern.Coregionalize(1,q, rank=q,active_dims=[1])*GPy.kern.Coregionalize(1,4, rank=4,active_dims=[2])\n", "\n", "m = GPy.models.GPRegression(X, y, kern)\n", "#m.mul.rbf.lengthscale = 50\n", "m.mul.mul.rbf.lengthscale=50\n", "m.Gaussian_noise.variance = sigma2\n", "#m.optimize(messages=True)" ], "language": "python", "metadata": {}, "outputs": [], "prompt_number": 7 }, { "cell_type": "code", "collapsed": false, "input": [ "#import GPy.inference.latent_function_inference.inference_method\n", "print m\n", "K1= m.kern.mul.coregion.K(Xt)#self.X)\n", "print K1.shape\n" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ " GP_regression. | Value | Constraint | Prior | Tied to\n", " \u001b[1mmul.mul.rbf.variance \u001b[0;0m | 1.0 | +ve | | \n", " \u001b[1mmul.mul.rbf.lengthscale\u001b[0;0m | 50.0 | +ve | | \n", " \u001b[1mmul.mul.coregion.W \u001b[0;0m | (4, 4) | | | \n", " \u001b[1mmul.mul.coregion.kappa \u001b[0;0m | (4,) | +ve | | \n", " \u001b[1mmul.coregion.W \u001b[0;0m | (7, 7) | | | \n", " \u001b[1mmul.coregion.kappa \u001b[0;0m | (7,) | +ve | | \n", " \u001b[1mGaussian_noise.variance\u001b[0;0m | 0.245147250992 | +ve | | \n", "(64, 64)\n" ] } ], "prompt_number": 8 }, { "cell_type": "code", "collapsed": false, "input": [ "si=np.dot(m.mul.coregion.W,m.mul.coregion.W.T)+np.diag(m.mul.coregion.kappa)\n", "#we have si , V, Lambda \n", "#np.save(\"/home/sura/GPy/TF/Si.npy\", si)\n", "np.save(\"Si.npy\", si)\n", "print si.shape" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "(7, 7)\n" ] } ], "prompt_number": 9 }, { "cell_type": "code", "collapsed": false, "input": [ "for i in range(q):\n", " m.plot(fixed_inputs=[(1, 0),(2,i),],linecol='Red',fillcol='lightBlue',which_data_rows=range(0,16*q)) #,(1,3)])#,(1,0),(1,0),(1,0),(1,1)])\n", " m.plot(fixed_inputs=[(1, 1),(2,i)],linecol='Blue',which_data_rows=range(16*q,32*q),newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,2)])\n", " m.plot(fixed_inputs=[(1, 2),(2,i)],linecol='Green',which_data_rows=range(32*q,48*q),newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,3)])\n", " m.plot(fixed_inputs=[(1, 3),(2,i)],linecol='Yellow',which_data_rows=range(48*q,64*q),newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,4)])\n", " pb.xlabel(\"Time series\")\n", " pb.ylabel(\"connectivity info. between \\n Transcripton Factor {} \\n and group from genes\".format (transcription_Factors[i]))\n", " " ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "heading", "level": 6, "metadata": {}, "source": [ "The below code show how compute mean and covariance, and then plot model , from the last block begin the execuation " ] }, { "cell_type": "code", "collapsed": false, "input": [ "X.shape\n", "G=np.zeros(200).reshape(200)\n", "print G.shape\n", "\n", "import sys\n", "import warnings\n", "from GPy import kern\n", "from GPy.util.linalg import dtrtrs\n", "#from GPy.model import Model\n", "#from parameterization import ObsAr\n", "from GPy import likelihoods\n", "from GPy.util import diag\n", "from GPy.util.linalg import pdinv, pdinv_post, dpotrs, tdot\n", "from GPy.util.linalg import pdinv, dpotrs, dpotri, symmetrify, jitchol\n", "from GPy.likelihoods.gaussian import Gaussian\n", "#from GPy.inference.latent_function_inference import exact_gaussian_inference, expectation_propagation, LatentFunctionInference\n", "#from parameterization.variational import VariationalPosterior\n", "#from GPy.plotting.matplot_dep.base_plots import x_frame1D\n", "#import Tango\n", "#from base_plots import gpplot, x_frame1D, x_frame2D\n", "from GPy.plotting.matplot_dep.base_plots import gpplot, x_frame1D, x_frame2D\n", "#from ...util.misc import param_to_array\n", "from GPy.util.misc import param_to_array\n", "#from ...models.gp_coregionalized_regression import GPCoregionalizedRegression\n", "#from GPy.models.gp_coregionalized_regression import GPCoregionalizedRegression\n", "#from ...models.sparse_gp_coregionalized_regression import SparseGPCoregionalizedRegression\n", "#from GPy.models.sparse_gp_coregionalized_regression import SparseGPCoregionalizedRegression\n", "\n", "def _raw_predict_post(self, _Xnew, full_cov=False, kern=None):\n", "\n", " YYT_factor = self.Y\n", " \n", " si=np.load(\"Si.npy\")\n", " Vv=np.load(\"V.npy\")\n", " Lambda=np.load(\"Lambda.npy\")\n", " Q=np.load(\"Q.npy\")\n", " t=np.load(\"Xt.npy\")\n", " t=t[:64,:3]\n", " t[:,0]=self.X[0:64,0]\n", " t[:,1]=_Xnew[0,1]\n", " t[:,2]=_Xnew[0,2]\n", "\n", " #K1= self.kern.mul.rbf.K(t)#self.X)\n", " K1=self.mul.mul.coregion.K(t)\n", " #print 'K.shape',K.shape \n", "\n", "\n", " #np.save(\"/Users/suraAlrashid/GPy/TF/K.npy\",K)\n", "\n", "\n", " te=np.dot(np.dot(np.dot(Lambda,Vv.T), si),np.dot(Vv,Lambda))\n", " K=np.kron(K1,te)\n", "\n", " Ky = K.copy()\n", " diag.add(Ky, self.likelihood.gaussian_variance(self.Y_metadata))\n", "\n", " LW = pdinv_post(Ky)\n", " \n", " YYT_factor=YYT_factor.reshape(YYT_factor.shape[0],1)\n", "\n", " #print 'YYT_factor',YYT_factor.shape\n", " alpha, _ = dpotrs(LW, YYT_factor, lower=1)\n", "\n", "\n", " \"\"\"K = kern.K(X)\"\"\"\n", " self.woodbury_chol=LW \n", " self.woodbury_vector=alpha \n", " self.woodbury_inv, _ = dpotri(self.woodbury_chol, lower=1)\n", "\n", " symmetrify(self.woodbury_inv)\n", "\n", " te1=si\n", " te2=np.dot( si,np.dot(Vv,Lambda))\n", " te3=np.dot(np.dot(Lambda,Vv.T), si)\n", " te=np.dot(np.dot(np.dot(Lambda,Vv.T), si),np.dot(Vv,Lambda))\n", "\n", "\n", "\n", " \"\"\"mu=self.posterior.mean(self.likelihood)\n", " var=self.posterior.covariance(self.likelihood)\"\"\"\n", "\n", " #Kx = self.kern.mul.rbf.K(t,_Xnew)\n", " Kx=self.mul.mul.coregion.K(t,_Xnew)\n", " #print 'Kx',Kx\n", "\n", " mu = np.dot(np.kron(Kx.T,te2), self.woodbury_vector)\n", " #mu = np.dot(np.kron(K1,te2), self.woodbury_vector)\n", " #print 'mu.',mu\n", "\n", " #Kxx = self.kern.mul.rbf.K(_Xnew)\n", " Kxx=self.mul.mul.coregion.K(_Xnew)\n", " var = np.atleast_3d(np.kron(Kxx,te1)) - np.tensordot(np.dot(np.atleast_3d(self.woodbury_inv).T, np.kron(Kx,te2)), np.kron(Kx,te3), [1,0]).T\n", " #var = np.atleast_3d(np.kron(K1,te1)) - np.tensordot(np.dot(np.atleast_3d(self.woodbury_inv).T, np.kron(K1,te2)), np.kron(K1,te3), [1,0]).T\n", "\n", " #print 'var',var.shape\n", " #K = self.kern.mul.rbf.K(t)\n", "\n", " #pb.figure()\n", " #full_K = np.vstack([np.hstack([K, Kx]), np.hstack([Kx.T, Kxx])])\n", "\n", " #pb.imshow(full_K)\n", " #pb.colorbar\n", " return mu, var\n", "\n", "\n", "def predict_post(self, Xnew, full_cov=False, Y_metadata=None, kern=None):\n", " #predict the latent function values\n", " mu, var = _raw_predict_post(self,Xnew, full_cov=full_cov, kern=kern)\n", " #print mu.shape, var.shape\n", " # now push through likelihood\n", " mean, var = self.likelihood.predictive_values(mu, var, full_cov, Y_metadata)\n", " return mean, var\n", "\n", "\n", "def predict_quantiles_post(self, X, quantiles=(2.5, 97.5), Y_metadata=None):\n", " m, v = _raw_predict_post(self,X, full_cov=False)\n", " #print 'predict_quantiles_post, sura '\n", " return self.likelihood.predictive_quantiles(m, v, quantiles, Y_metadata)\n", "\n", "\n", "def plot_postt(model, newfig=True,plot_limits=None, which_data_rows='all',\n", " which_data_ycols='all', fixed_inputs=[],\n", " levels=20, samples=0, fignum=None, ax=None, resolution=None,\n", " plot_raw=False,\n", " linecol='darkBlue',fillcol='lightBlue', Y_metadata=None, data_symbol='kx'):\n", " #deal with optional arguments\n", " if which_data_rows == 'all':\n", " which_data_rows = slice(None)\n", " if which_data_ycols == 'all':\n", " which_data_ycols = np.arange(model.output_dim)\n", " #if len(which_data_ycols)==0:\n", " #raise ValueError('No data selected for plotting')\n", " if ax is None:\n", " if newfig:\n", " fig = pb.figure(num=fignum)\n", " ax = fig.add_subplot(111)\n", " else :\n", " \n", " fig = pb.gcf()\n", " ax = fig.add_subplot(111)\n", "\n", " if hasattr(model, 'has_uncertain_inputs') and model.has_uncertain_inputs():\n", " X = model.X.mean\n", " X_variance = param_to_array(model.X.variance)\n", " else:\n", " X = model.X\n", " X, Y = param_to_array(X, model.Y)\n", " if hasattr(model, 'Z'): Z = param_to_array(model.Z)\n", "\n", " #work out what the inputs are for plotting (1D or 2D)\n", " fixed_dims = np.array([i for i,v in fixed_inputs])\n", " free_dims = np.setdiff1d(np.arange(model.input_dim),fixed_dims)\n", " plots = {}\n", " #one dimensional plotting\n", " #print len(free_dims)\n", " if len(free_dims) == 1:\n", "\n", " #define the frame on which to plot\n", " \n", " t=np.load(\"Xt.npy\")\n", " t=t[:64,:3]\n", "\n", " Xnew, xmin, xmax = x_frame1D(X[:,free_dims], plot_limits=plot_limits, resolution=resolution or 200)\n", " #print Xnew.shape,'XNew'\n", " Xgrid = np.empty((Xnew.shape[0],model.input_dim))\n", " Xgrid[:,free_dims] = Xnew\n", " for i,vz in fixed_inputs:\n", " Xgrid[:,i] = vz\n", " #print 'Xgrid',Xgrid\n", " #print 'xgrid',Xgrid.shape\n", " #make a prediction on the frame and plot it\n", " ii= [v for i,v in fixed_inputs]\n", " ii =ii[1]\n", " if plot_raw:\n", " m, v = _raw_predict_post(model,Xgrid)\n", " np.save(\"m.npy\",m[200*ii:200*(ii+1), 0])\n", " \n", "\n", " lower = m - 2*np.sqrt(v)\n", " upper = m + 2*np.sqrt(v)\n", " else:\n", " if isinstance(model,GPCoregionalizedRegression) or isinstance(model,SparseGPCoregionalizedRegression):\n", " meta = {'output_index': Xgrid[:,-1:].astype(np.int)}\n", " else:\n", " meta = None\n", " m, v = predict_post(model,Xgrid, full_cov=False, Y_metadata=meta)\n", " #print 'meta',meta\n", " lower, upper = predict_quantiles_post(model,Xgrid, Y_metadata=meta)\n", "\n", " np.save(\"m.npy\",m[200*ii:200*(ii+1), 0])\n", " #print 'm[200*ii:200*(ii+1)',m[200*ii:200*(ii+1),0], ii\n", " #rr=m[200*ii:200*(ii+1),0]-m[200*(ii+1):200*(ii+1+1),0]\n", " #print rr\n", " #print m.shape, Xnew.shape\n", " #if not plot_raw: plots['dataplot'] = ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], data_symbol, mew=1.5)\n", " \n", " # the end block\n", " \n", "\n", " \n", " for d in which_data_ycols:\n", " \n", " #print 'm[:d]',m[:,d].shape, 'Xnew',Xnew.shape, 'lower[:,d] ,upper[: ,d]', lower[:,d].shape ,upper[: ,d].shape\n", " plots['gpplot'] = gpplot(Xnew, m[200*ii:200*(ii+1), d], lower[200*ii:200*(ii+1), d], upper[200*ii:200*(ii+1), d], ax=ax, edgecol=linecol, fillcol=fillcol)\n", " \n", " \n", " #print 'X[which_data_rows,free_dims]',X[which_data_rows,free_dims].shape,X[which_data_rows,free_dims]\n", "\n", " #print 'Y[which_data_rows, d]',Y[which_data_rows, d].shape,Y[which_data_rows, d]\n", "\n", " if fixed_inputs==[(1,0),(2,0)] :\n", " \n", " if not plot_raw: plots['dataplot'] =ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', color=linecol,mew=1,label=\" 129 Ntg\")\n", " if fixed_inputs==[(1,0),(2,1)] :\n", " if not plot_raw: plots['dataplot'] =ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', color=linecol,mew=1,label=\" 129 SOD1\")\n", " if fixed_inputs==[(1,0),(2,2)] :\n", " if not plot_raw: plots['dataplot'] =ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', color=linecol,mew=1,label=\"C57 Ntg\")\n", " if fixed_inputs==[(1,0),(2,3)] :\n", " if not plot_raw: plots['dataplot'] =ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', color=linecol,mew=1,label=\"C57 SOD1\") \n", " else :\n", " if not plot_raw: plots['dataplot'] =ax.plot(X[which_data_rows,free_dims], Y[which_data_rows, d], 'kx', color=linecol,mew=1) \n", " \n", " \n", " ax.legend(bbox_to_anchor=(1,1.4)) \n", " #optionally plot some samples\n", " if samples: #NOTE not tested with fixed_inputs\n", " Ysim = model.posterior_samples(Xgrid, samples)\n", " for yi in Ysim.T:\n", " plots['posterior_samples'] = ax.plot(Xnew, yi[:,None], Tango.colorsHex['darkBlue'], linewidth=0.25)\n", " #ax.plot(Xnew, yi[:,None], marker='x', linestyle='--',color=Tango.colorsHex['darkBlue']) #TODO apply this line for discrete outputs.\n", "\n", "\n", " #add error bars for uncertain (if input uncertainty is being modelled)\n", " if hasattr(model,\"has_uncertain_inputs\") and model.has_uncertain_inputs():\n", " plots['xerrorbar'] = ax.errorbar(X[which_data_rows, free_dims].flatten(), Y[which_data_rows, which_data_ycols].flatten(),\n", " xerr=2 * np.sqrt(X_variance[which_data_rows, free_dims].flatten()),\n", " ecolor='k', fmt=None, elinewidth=.5, alpha=.5)\n", "\n", "\n", " #set the limits of the plot to some sensible values\n", " ymin, ymax = min(np.append(Y[which_data_rows, which_data_ycols].flatten(), lower)), max(np.append(Y[which_data_rows, which_data_ycols].flatten(), upper))\n", " ymin, ymax = ymin - 0.1 * (ymax - ymin), ymax + 0.1 * (ymax - ymin)\n", " #print ymin,ymax\n", " ax.set_xlim(xmin, xmax)\n", " ax.set_ylim(-3,3)\n", " #ax.set_ylim(ymin, ymax)\n", "\n", " #add inducing inputs (if a sparse model is used)\n", " if hasattr(model,\"Z\"):\n", " #Zu = model.Z[:,free_dims] * model._Xscale[:,free_dims] + model._Xoffset[:,free_dims]\n", " if isinstance(model,SparseGPCoregionalizedRegression):\n", " Z = Z[Z[:,-1] == Y_metadata['output_index'],:]\n", " Zu = Z[:,free_dims]\n", " z_height = ax.get_ylim()[0]\n", " plots['inducing_inputs'] = ax.plot(Zu, np.zeros_like(Zu) + z_height, 'r|', mew=1.5, markersize=12)\n", " else:\n", " raise NotImplementedError, \"Cannot define a frame with more than two input dimensions\"\n", " return plots\n", "#\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "\n", "for i in range(q):\n", " plot_postt(m,fixed_inputs=[(1, 0),(2,i)],linecol='Red',which_data_rows=range(0,16*q))#,1)])\n", " plot_postt(m,fixed_inputs=[(1, 1),(2,i)],linecol='Blue',which_data_rows=range(16*q,32*q))#,newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,2)])\n", " plot_postt(m,fixed_inputs=[(1, 2),(2,i)],linecol='Green',which_data_rows=range(32*q,48*q))#,newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,3)])\n", " plot_postt(m,fixed_inputs=[(1, 3),(2,i)],linecol='Yellow',which_data_rows=range(48*q,64*q))#,newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,4)])\n", " \n", " mm=np.load(\"m.npy\")\n", " mm.reshape(-1,1)\n", " \n", " G= np.hstack((G,mm))\n", " " ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "(200,)\n" ] }, { "ename": "NameError", "evalue": "global name 'GPCoregionalizedRegression' is not defined", "output_type": "pyerr", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m\n\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 259\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 260\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 261\u001b[0;31m \u001b[0mplot_postt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfixed_inputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlinecol\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Red'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mwhich_data_rows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m16\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;31m#,1)])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 262\u001b[0m \u001b[0mplot_postt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfixed_inputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlinecol\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Blue'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mwhich_data_rows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m16\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;31m#,newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,2)])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 263\u001b[0m \u001b[0mplot_postt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mm\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfixed_inputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlinecol\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Green'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mwhich_data_rows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m48\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mq\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;31m#,newfig=False) #,(1,0)])#,(1,0),(1,0),(1,0),(1,3)])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m\u001b[0m in \u001b[0;36mplot_postt\u001b[0;34m(model, newfig, plot_limits, which_data_rows, which_data_ycols, fixed_inputs, levels, samples, fignum, ax, resolution, plot_raw, linecol, fillcol, Y_metadata, data_symbol)\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[0mupper\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mm\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;36m2\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msqrt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 181\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 182\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mGPCoregionalizedRegression\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mSparseGPCoregionalizedRegression\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 183\u001b[0m \u001b[0mmeta\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m'output_index'\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mXgrid\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mastype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 184\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mNameError\u001b[0m: global name 'GPCoregionalizedRegression' is not defined" ] }, { "metadata": {}, "output_type": "display_data", "png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEACAYAAABI5zaHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADB5JREFUeJzt3F+IXNUdwPHvNBsFoamVhTwkkUAM/ikoKsZYEa9E6OpD\nAxYaom2xCs1LbF9KY3xo5sWWvBSRgIY0Sp/Mg/oQS0iw1cESTDSgMalJyG4byB8Rta1IEZol04dz\nkxknu7l3Zu7eTX77/cDC3L0nN8dD+O7dc2cESZIkSZIkSZIkSZIkSZKk0F4CPgUOXWLM88Bx4CBw\nex2TkiQN5z5SsKeL+8PArvz13cC+OiYlSRreUqaP+4vAmq7jo8DCmZ6QJGl636rgGouAk13Hp4DF\nFVxXkjSgKuIO0Og5bld0XUnSAEYquMZpYEnX8eL8e9+wbNmy9sTERAV/nSTNKRPADf3+oSru3HcC\nP8tfrwT+Q3p3zTdMTEzQbrf9arfZtGnTrM/hcvlyLVwL1+LSX8CyQcJc5s79FeB+YJS0t74JmJ+f\n20p6p8zDwDjwX+Dng0xEklSdMnFfW2LM+mEnIkmqTlUPVNWHLMtmewqXDdeiw7XocC2G1/sul5nU\nzvePJEklNRoNGKDV3rlLUkDGXZICMu6SFJBxl6SAjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy\n7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk3CUpIOMuSQEZ\nd0kKyLhLUkDGXZICMu6SFJBxl6SAjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIqE/cx4ChwHNgw\nxflRYDfwIXAYeLyqyUmSBtMoOD8POAY8CJwG3gfWAke6xjSBq4GNpNAfAxYCkz3Xarfb7eFnLElz\nSKPRgOJWX6Tozn0FMA6cAM4CO4DVPWM+ARbkrxcAX3Bx2CVJNRopOL8IONl1fAq4u2fMNuAt4Azw\nbeDHlc1OkjSQoriX2Ud5hrTfngHLgDeB24Cvegc2m80Lr7MsI8uycrOUpDmi1WrRarWGvk7RPs5K\n0p76WH68ETgHbO4aswt4FtibH/+V9OD1QM+13HOXpD7N1J77AWA5sBS4ClgD7OwZc5T0wBXSg9Qb\ngX/0OxFJUnWKtmUmgfXAHtI7Z7aT3imzLj+/Ffgd8DJwkPTD4jfAv2ZispKkcvq+1R+C2zKS1KeZ\n2paRJF2BjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJ\nCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk3CUpIOMuSQEZd0kKyLhLUkDGXZICMu6SFJBxl6SAjLsk\nBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy7pIUkHGXpICMuyQFZNwlKaAycR8DjgLHgQ3TjMmAD4DD\nQKuKiUmSBtcoOD8POAY8CJwG3gfWAke6xlwL7AV+AJwCRoHPp7hWu91uDztfSZpTGo0GFLf6IkV3\n7iuAceAEcBbYAazuGfMo8Bop7DB12CVJNSqK+yLgZNfxqfx73ZYD1wFvAweAn1Y2O0nSQEYKzpfZ\nR5kP3AGsAq4B3gX2kfboJUmzoCjup4ElXcdL6Gy/nHeStBXzdf71DnAbU8S92WxeeJ1lGVmW9Ttf\nSQqt1WrRarWGvk7RJv0I6YHqKuAM8B4XP1C9CdhCeqB6NbAfWAN83HMtH6hKUp8GfaBadOc+CawH\n9pDeObOdFPZ1+fmtpLdJ7gY+As4B27g47JKkGvX902AI3rlLUp9m6q2QkqQrkHGXpICMuyQFZNwl\nKSDjLkkBGXdJCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk3CUpIOMuSQEZd0kKyLhLUkDGXZICMu6S\nFJBxl6SAjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJ\nCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk3CUpoDJxHwOOAseBDZcYdxcwCTxSwbwkSUMoivs8YAsp\n8LcAa4Gbpxm3GdgNNKqcoCSpf0VxXwGMAyeAs8AOYPUU454CXgU+q3JykqTBFMV9EXCy6/hU/r3e\nMauBF/LjdjVTkyQNqijuZUL9HPB0PraB2zKSNOtGCs6fBpZ0HS8h3b13u5O0XQMwCjxE2sLZ2Xux\nZrN54XWWZWRZ1tdkJSm6VqtFq9Ua+jpFd9kjwDFgFXAGeI/0UPXINONfBt4AXp/iXLvddsdGkvrR\naDRggB2Rojv3SWA9sIf0jpjtpLCvy89v7fcvlCTNvDr3x71zl6Q+DXrn7idUJSkg4y5JARl3SQrI\nuEtSQMZdkgIy7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk\n3CUpIOMuSQEZd0kKyLhLUkDGXZICMu6SFJBxl6SAjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy\n7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJCsi4S1JAZeM+BhwFjgMbpjj/GHAQ+AjYC9xayewkSQNp\nlBgzDzgGPAicBt4H1gJHusbcA3wMfEn6QdAEVvZcp91ut4ecriTNLY1GA8q1+hvK3LmvAMaBE8BZ\nYAewumfMu6SwA+wHFvc7EUlSdcrEfRFwsuv4VP696TwJ7BpmUpKk4YyUGNPPXsoDwBPAvVOdbDab\nF15nWUaWZX1cWpLia7VatFqtoa9TZh9nJWkPfSw/3gicAzb3jLsVeD0fNz7Fddxzl6Q+zeSe+wFg\nObAUuApYA+zsGXM9Kew/YeqwS5JqVGZbZhJYD+whvXNmO+mdMuvy81uB3wLfBV7Iv3eW9CBWkjQL\n+r7VH4LbMpLUp5nclpEkXWGMuyQFZNwlKSDjLkkBGXdJCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk\n3CUpIOMuSQEZd0kKyLhLUkDGXZICMu6SFJBxl6SAjLskBWTcJSkg4y5JARl3SQrIuEtSQMZdkgIy\n7pIUkHGXpICMuyQFZNwlKSDjLkkBGXdJCsi4S1JAxl2SAjLukhSQcZekgIy7JAVk3CUpoDJxHwOO\nAseBDdOMeT4/fxC4vZqpSZIGVRT3ecAWUuBvAdYCN/eMeRi4AVgO/AJ4oeI5htNqtWZ7CpcN16LD\ntehwLYZXFPcVwDhwAjgL7ABW94z5IfCn/PV+4FpgYXVTjMd/uB2uRYdr0eFaDK8o7ouAk13Hp/Lv\nFY1ZPPzUJEmDKop7u+R1GgP+OUnSDOiNcq+VQJO05w6wETgHbO4a8yLQIm3ZQHr4ej/wac+1xoFl\ng09VkuakCdJzzUqN5BdeClwFfMjUD1R35a9XAvuqnoQkqXoPAcdId94b8++ty7/O25KfPwjcUevs\nJEmSJA3GDz11FK3FY6Q1+AjYC9xa39RqV+bfBcBdwCTwSB2TmgVl1iEDPgAOk55nRVW0FqPAbtJ2\n8GHg8dpmVr+XSM8pD11izKx2cx5pe2YpMJ/iPfq7ibtHX2Yt7gG+k78eY26vxflxbwF/Bn5U1+Rq\nVGYdrgX+TuftxKN1Ta5mZdaiCfw+fz0KfEF6DhjRfaRgTxf3vrtZ9f9bxg89dZRZi3eBL/PX+4n7\n+YAyawHwFPAq8FltM6tXmXV4FHiN9HkRgM/rmlzNyqzFJ8CC/PUCUtwna5pf3f4G/PsS5/vuZtVx\n90NPHWXWotuTdH4yR1P238VqOv/7ioiflSizDsuB64C3gQPAT+uZWu3KrMU24HvAGdJWxK/qmdpl\nqe9uVv0rjh966ujnv+kB4Ang3hmay2wrsxbPAU/nYxsUfwbjSlRmHeaT3nG2CriG9NvdPtJeayRl\n1uIZ0nZNRvqMzJvAbcBXMzety1pf3aw67qeBJV3HS+j8ejndmMX596IpsxaQHqJuI+25X+rXsitZ\nmbW4k84H4UZJb8E9C+yc8dnVp8w6nCRtxXydf71DClq0uJdZi+8Dz+avJ4B/AjeSfqOZa2a9m37o\nqaPMWlxP2ndcWevM6ldmLbq9TMx3y5RZh5uAv5AeOF5DesB2S31TrE2ZtfgDsCl/vZAU/+tqmt9s\nWEq5B6qz1k0/9NRRtBZ/JD0k+iD/eq/uCdaozL+L86LGHcqtw69J75g5BPyy1tnVq2gtRoE3SJ04\nRHrYHNUrpGcL/yP99vYEc7ebkiRJkiRJkiRJkiRJkiRJkiRJkq4E/we+jwyIuTI3ZAAAAABJRU5E\nrkJggg==\n", "text": [ "" ] } ], "prompt_number": 11 }, { "cell_type": "code", "collapsed": false, "input": [ "#G= np.hstack((G,mm))\n", "\n", "print G.shape" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [ "print G.shape\n", "G=G.reshape(-1,q+1)\n", "print G.shape" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [ "G1 = G[:,1:q+1]\n", "G1.shape\n", "F=np.dot(G1,np.dot(V.T,Lambda))\n", "F.shape\n", "pb.plot(F[:,0])" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [ "print G1.shape" ], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [], "language": "python", "metadata": {}, "outputs": [] }, { "cell_type": "code", "collapsed": false, "input": [], "language": "python", "metadata": {}, "outputs": [] } ], "metadata": {} } ] }