#!/usr/bin/env python3
# source
# https://scikit-learn.org/stable/auto_examples/exercises/plot_iris_exercise.html

import matplotlib.pyplot as plt
import numpy as np

from sklearn import datasets, svm

# Load the Iris flower dataset: 150 samples, 4 features, 3 classes (0, 1, 2).
iris = datasets.load_iris()
X = iris.data
y = iris.target

# Drop class 0 and keep only the first two features, turning this into a
# 2-D binary classification problem (classes 1 and 2) that we can plot.
X = X[y != 0, :2]
y = y[y != 0]

n_sample = len(X)

# Deterministic shuffle, then cast labels to float for the SVM.
np.random.seed(0)
order = np.random.permutation(n_sample)
X = X[order]
y = y[order].astype(float)

# Hold out the final 10% of the shuffled data as a test set.
split = int(0.9 * n_sample)
X_train, X_test = X[:split], X[split:]
y_train, y_test = y[:split], y[split:]

# Fit and visualize an SVM decision surface for several kernel types.
for kernel in ("linear", "rbf", "poly"):
    # NOTE: gamma only affects the "rbf" and "poly" kernels; it is
    # ignored by "linear".
    clf = svm.SVC(kernel=kernel, gamma=10)
    clf.fit(X_train, y_train)

    # One fresh figure per kernel (plt.clf() would be redundant here:
    # a newly created figure is already empty).
    plt.figure()
    plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10,
                cmap=plt.cm.Paired, edgecolor="k", s=20)

    # Circle out the test data
    plt.scatter(X_test[:, 0], X_test[:, 1], s=80,
                facecolors="none", zorder=10, edgecolor="k")

    plt.axis("tight")
    x_min = X[:, 0].min()
    x_max = X[:, 0].max()
    y_min = X[:, 1].min()
    y_max = X[:, 1].max()

    # Evaluate the decision function on a 200x200 grid spanning the data.
    XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
    Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])

    # Put the result into a color plot: filled two-tone background for the
    # predicted class, plus the margin (+/-0.5) and boundary (0) contours.
    Z = Z.reshape(XX.shape)
    plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired)
    plt.contour(XX, YY, Z,
                colors=["k", "k", "k"],
                linestyles=["--", "-", "--"],
                levels=[-0.5, 0, 0.5])

    plt.title(kernel)
plt.show()
