# -*- coding: utf-8 -*-
"""
Created on Wed Nov 20 09:00:08 2019

@author: Márton
"""

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from sklearn import mixture
import sklearn.cluster as cluster;

n_samples = 300

# Fix the RNG seed so every run produces the identical sample.
np.random.seed(0)

# Spherical (unit-covariance) cloud centred at the origin.
# NOTE: the three randn() calls below must stay in this order so the
# seeded stream yields the same draws.
nonshifted_gaussian = np.random.randn(n_samples, 2)

# The same spherical cloud translated to (20, 20).
shifted_gaussian = np.random.randn(n_samples, 2) + np.array([20, 20])

# Anisotropic zero-mean Gaussian: unit draws pushed through a linear map.
C = np.array([[0., -0.7], [3.5, .7]])
stretched_gaussian = np.dot(np.random.randn(n_samples, 2), C)

# Training set = the two spherical clusters stacked row-wise.
# (Alternative data set: np.concatenate([shifted_gaussian, stretched_gaussian]))
X_train = np.concatenate((nonshifted_gaussian, shifted_gaussian), axis=0)

# Scatter the training sample and highlight the true component means.
fig = plt.figure(1)
plt.scatter(X_train[:, 0], X_train[:, 1], s=10)
# True means of the two generated components are (0, 0) and (20, 20).
# Fix: the original `plt.scatter([0, 2], [0, 2], ...)` marked (0, 0) and
# (2, 2), which matches neither component centre — a typo for 20.
plt.scatter([0, 20], [0, 20], s=20, c='red')
plt.title('Plot of Gaussian mixture')
plt.show()

# K-means baseline with the same number of clusters as mixture components.
# (Idiom fix: dropped the un-Pythonic trailing semicolons and normalised
# PEP 8 spacing; behaviour is unchanged.)
n_clus = 2
kmeans = cluster.KMeans(n_clusters=n_clus, random_state=2019)
kmeans.fit(X_train)
# Hard cluster assignment (one label per training sample).
kmeans_membership = kmeans.labels_

# Compare the four covariance parameterizations of a 2-component GMM
# by their BIC score on the training data (lower BIC = better fit/size
# trade-off).
cov_par = ['full', 'tied', 'diag', 'spherical']
bic = {}
for cov_type in cov_par:
    gmm = mixture.GaussianMixture(n_components=2, covariance_type=cov_type)
    gmm.fit(X_train)
    bic[cov_type] = gmm.bic(X_train)

# Fit a 2-component GMM with spherical covariances.
# NOTE(review): 'spherical' is hard-coded even though `bic` was computed
# above — presumably the author inspected the dict manually; confirm.
clf = mixture.GaussianMixture(n_components=2, covariance_type='spherical')
clf.fit(X_train)

# Evaluate the negative log-likelihood on a grid covering the data range
# and render it as a log-scaled contour plot.
grid_x = np.linspace(-20., 30.)
grid_y = np.linspace(-20., 40.)
X, Y = np.meshgrid(grid_x, grid_y)
grid_points = np.column_stack((X.ravel(), Y.ravel()))
Z = -clf.score_samples(grid_points).reshape(X.shape)

contours = plt.contour(X, Y, Z, norm=LogNorm(vmin=1.0, vmax=1000.0),
                       levels=np.logspace(0, 3, 10))
plt.colorbar(contours, shrink=0.8, extend='both')
plt.scatter(X_train[:, 0], X_train[:, 1], .8)
# Fitted component means in red.
plt.scatter(clf.means_[:, 0], clf.means_[:, 1], c='red')

plt.title('Negative log-likelihood predicted by a GMM')
plt.axis('tight')
plt.show()

# Per-sample posterior responsibilities under the fitted mixture
# (shape: n_samples x 2, rows sum to 1).
posterior_proba = clf.predict_proba(X_train)