From 391d84c5beed4b20226ce0317d6013656e4a4375 Mon Sep 17 00:00:00 2001 From: mdavid Date: Wed, 9 Mar 2022 12:27:44 +0100 Subject: [PATCH 1/2] issue #162 homogenisation of examples + reduction of docstring + code block in new line --- module08/en.subject.tex | 27 ++++++++++++++++----------- version | 2 +- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/module08/en.subject.tex b/module08/en.subject.tex index 7e1b4e3a..20daa9fc 100644 --- a/module08/en.subject.tex +++ b/module08/en.subject.tex @@ -1251,9 +1251,10 @@ \section*{Instructions} """ Compute confusion matrix to evaluate the accuracy of a classification. Args: - y_true:a numpy.ndarray for the correct labels - y_hat:a numpy.ndarray for the predicted labels - labels: optional, a list of labels to index the matrix. This may be used to reorder or select a subset of labels. (default=None) + y_true: numpy.ndarray for the correct labels + y_hat: numpy.ndarray for the predicted labels + labels: Optional, a list of labels to index the matrix. + This may be used to reorder or select a subset of labels. (default=None) Returns: The confusion matrix as a numpy ndarray. None on any error. @@ -1310,17 +1311,20 @@ \subsection{Objective(s):} \subsection{Instructions:} In the \texttt{confusion\_matrix.py} file, write the following function as per the instructions below: + \begin{minted}[bgcolor=darcula-back,formatcom=\color{lightgrey},fontsize=\scriptsize]{python} def confusion_matrix_(y, y_hat, labels=None, df_option=False): """ Compute confusion matrix to evaluate the accuracy of a classification. Args: - y:a numpy.ndarray for the correct labels - y_hat:a numpy.ndarray for the predicted labels - labels: optional, a list of labels to index the matrix. This may be used to reorder or select a subset of labels. (default=None) - df_option: optional, if set to True the function will return a pandas DataFrame instead of a numpy array. 
(default=False) + y: numpy.ndarray for the correct labels + y_hat: numpy.ndarray for the predicted labels + labels: Optional, a list of labels to index the matrix. + This may be used to reorder or select a subset of labels. (default=None) + df_option: Optional, if set to True the return is a pandas DataFrame instead of a numpy array. + (default=False) Returns: - The confusion matrix as a numpy ndarray or a pandas DataFrame according to df_option value. + Confusion matrix as a numpy ndarray or a pandas DataFrame according to df_option value. None on any error. Raises: This function should not raise any Exception. @@ -1329,13 +1333,14 @@ \subsection{Instructions:} \end{minted} \section{Examples:} + \begin{minted}[bgcolor=darcula-back,formatcom=\color{lightgrey},fontsize=\scriptsize]{python} import numpy as np y_hat = np.array(['norminet', 'dog', 'norminet', 'norminet', 'dog', 'bird']) -y_true = np.array(['dog', 'dog', 'norminet', 'norminet', 'dog', 'norminet']) +y = np.array(['dog', 'dog', 'norminet', 'norminet', 'dog', 'norminet']) # Example 1: -confusion_matrix_(y_true, y_hat, df_option=True) +confusion_matrix_(y, y_hat, df_option=True) # Output: bird dog norminet bird 0 0 0 @@ -1343,7 +1348,7 @@ \section{Examples:} norminet 1 0 2 # Example 2: -confusion_matrix_(y_true, y_hat, labels=['bird', 'dog'], df_option=True) +confusion_matrix_(y, y_hat, labels=['bird', 'dog'], df_option=True) # Output: bird dog bird 0 0 diff --git a/version b/version index 7cc166d3..f05f17cf 100644 --- a/version +++ b/version @@ -1 +1 @@ -v4.0.3 +v4.0.4 From 0cb0e8a13678294b6df23a464f9efde0a8f2a16c Mon Sep 17 00:00:00 2001 From: mdavid Date: Wed, 16 Mar 2022 10:13:25 +0100 Subject: [PATCH 2/2] typo in docstring --- module08/en.subject.tex | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/module08/en.subject.tex b/module08/en.subject.tex index a009c305..155af25d 100644 --- a/module08/en.subject.tex +++ b/module08/en.subject.tex @@ -1313,16 +1313,14 @@ 
\subsection{Instructions:} In the \texttt{confusion\_matrix.py} file, write the following function as per the instructions below: \begin{minted}[bgcolor=darcula-back,formatcom=\color{lightgrey},fontsize=\scriptsize]{python} -def confusion_matrix_(y, y_hat, labels=None, df_option=False): +def confusion_matrix_(y_true, y_hat, labels=None, df_option=False): """ Compute confusion matrix to evaluate the accuracy of a classification. Args: - y: numpy.ndarray for the correct labels - y_hat: numpy.ndarray for the predicted labels - labels: Optional, a list of labels to index the matrix. - This may be used to reorder or select a subset of labels. (default=None) - df_option: Optional, if set to True the return is a pandas DataFrame instead of a numpy array. - (default=False) + y_true: a numpy.ndarray for the correct labels + y_hat: a numpy.ndarray for the predicted labels + labels: optional, a list of labels to index the matrix. This may be used to reorder or select a subset of labels. (default=None) + df_option: optional, if set to True the function will return a pandas DataFrame instead of a numpy array. (default=False) Returns: Confusion matrix as a numpy ndarray or a pandas DataFrame according to df_option value. None on any error.