Commit 98941f5

Add code folder
mamintoosi committed Sep 29, 2024
1 parent 327a34d commit 98941f5
Showing 18 changed files with 5,538 additions and 0 deletions.
678 changes: 678 additions & 0 deletions code/01_Auto_Grad.ipynb

Large diffs are not rendered by default.

1,678 changes: 1,678 additions & 0 deletions code/02_Regression_Gradient_Descent.ipynb

Large diffs are not rendered by default.

68 changes: 68 additions & 0 deletions code/BCE.ipynb
@@ -0,0 +1,68 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"x = [-2.2 -1.4 -0.8 0.2 0.4 0.8 1.2 2.2 2.9 4.6]\n",
"y = [0. 0. 1. 0. 1. 1. 1. 1. 1. 1.]\n",
"p(y) = [0.19 0.33 0.47 0.7 0.74 0.81 0.86 0.94 0.97 0.99]\n",
"Log Loss / Cross Entropy = 0.3329\n"
]
}
],
"source": [
"from sklearn.linear_model import LogisticRegression\n",
"from sklearn.metrics import log_loss\n",
"import numpy as np\n",
"\n",
"x = np.array([-2.2, -1.4, -.8, .2, .4, .8, 1.2, 2.2, 2.9, 4.6])\n",
"y = np.array([0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])\n",
"\n",
"logr = LogisticRegression(solver='lbfgs')\n",
"logr.fit(x.reshape(-1, 1), y)\n",
"\n",
"y_pred = logr.predict_proba(x.reshape(-1, 1))[:, 1].ravel()\n",
"loss = log_loss(y, y_pred)\n",
"\n",
"print('x = {}'.format(x))\n",
"print('y = {}'.format(y))\n",
"print('p(y) = {}'.format(np.round(y_pred, 2)))\n",
"print('Log Loss / Cross Entropy = {:.4f}'.format(loss))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "tf",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.17"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
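As a quick check of what sklearn's log_loss reports in the BCE notebook above, here is a minimal sketch of binary cross-entropy computed by hand. It assumes the y and y_pred arrays from the notebook cell above; it is not part of the committed file.

import numpy as np

# Manual binary cross-entropy / log loss, mirroring sklearn.metrics.log_loss
# (assumes y and y_pred are the arrays defined in the notebook cell above)
eps = 1e-15                               # clip probabilities to avoid log(0)
p = np.clip(y_pred, eps, 1 - eps)
bce = -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))
print('Manual BCE = {:.4f}'.format(bce))  # should match the log_loss value printed above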
240 changes: 240 additions & 0 deletions code/GAN/GAN-01.ipynb
@@ -0,0 +1,240 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## GAN\n",
"\n",
"https://machinelearningmastery.com/how-to-develop-a-generative-adversarial-network-for-a-1-dimensional-function-from-scratch-in-keras/"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# define and fit a discriminator model\n",
"from numpy import zeros\n",
"from numpy import ones\n",
"from numpy import hstack\n",
"from numpy.random import rand\n",
"from numpy.random import randn\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense\n",
"\n",
"# define the standalone discriminator model\n",
"def define_discriminator(n_inputs=2):\n",
"\tmodel = Sequential()\n",
"\tmodel.add(Dense(25, activation='relu', kernel_initializer='he_uniform', input_dim=n_inputs))\n",
"\tmodel.add(Dense(1, activation='sigmoid'))\n",
"\t# compile model\n",
"\tmodel.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
"\treturn model\n",
"\n",
"# generate n real samples with class labels\n",
"def generate_real_samples(n):\n",
"\t# generate inputs in [-0.5, 0.5]\n",
"\tX1 = rand(n) - 0.5\n",
"\t# generate outputs X^2\n",
"\tX2 = X1 * X1\n",
"\t# stack arrays\n",
"\tX1 = X1.reshape(n, 1)\n",
"\tX2 = X2.reshape(n, 1)\n",
"\tX = hstack((X1, X2))\n",
"\t# generate class labels\n",
"\ty = ones((n, 1))\n",
"\treturn X, y\n",
"\n",
"# generate n fake samples with class labels\n",
"def generate_fake_samples(n):\n",
"\t# generate inputs in [-1, 1]\n",
"\tX1 = -1 + rand(n) * 2\n",
"\t# generate outputs in [-1, 1]\n",
"\tX2 = -1 + rand(n) * 2\n",
"\t# stack arrays\n",
"\tX1 = X1.reshape(n, 1)\n",
"\tX2 = X2.reshape(n, 1)\n",
"\tX = hstack((X1, X2))\n",
"\t# generate class labels\n",
"\ty = zeros((n, 1))\n",
"\treturn X, y\n",
"\n",
"# train the discriminator model\n",
"def train_discriminator(model, n_epochs=1000, n_batch=128):\n",
"\thalf_batch = int(n_batch / 2)\n",
"\t# run epochs manually\n",
"\tfor i in range(n_epochs):\n",
"\t\t# generate real examples\n",
"\t\tX_real, y_real = generate_real_samples(half_batch)\n",
"\t\t# update model\n",
"\t\tmodel.train_on_batch(X_real, y_real)\n",
"\t\t# generate fake examples\n",
"\t\tX_fake, y_fake = generate_fake_samples(half_batch)\n",
"\t\t# update model\n",
"\t\tmodel.train_on_batch(X_fake, y_fake)\n",
"\t\t# evaluate the model\n",
"\t\t_, acc_real = model.evaluate(X_real, y_real, verbose=0)\n",
"\t\t_, acc_fake = model.evaluate(X_fake, y_fake, verbose=0)\n",
"\t\tprint(i, acc_real, acc_fake)\n",
"\n",
"# define the discriminator model\n",
"model = define_discriminator()\n",
"# fit the model\n",
"train_discriminator(model)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# train a generative adversarial network on a one-dimensional function\n",
"from numpy import hstack\n",
"from numpy import zeros\n",
"from numpy import ones\n",
"from numpy.random import rand\n",
"from numpy.random import randn\n",
"from keras.models import Sequential\n",
"from keras.layers import Dense\n",
"from matplotlib import pyplot\n",
"\n",
"# define the standalone discriminator model\n",
"def define_discriminator(n_inputs=2):\n",
"\tmodel = Sequential()\n",
"\tmodel.add(Dense(25, activation='relu', kernel_initializer='he_uniform', input_dim=n_inputs))\n",
"\tmodel.add(Dense(1, activation='sigmoid'))\n",
"\t# compile model\n",
"\tmodel.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])\n",
"\treturn model\n",
"\n",
"# define the standalone generator model\n",
"def define_generator(latent_dim, n_outputs=2):\n",
"\tmodel = Sequential()\n",
"\tmodel.add(Dense(15, activation='relu', kernel_initializer='he_uniform', input_dim=latent_dim))\n",
"\tmodel.add(Dense(n_outputs, activation='linear'))\n",
"\treturn model\n",
"\n",
"# define the combined generator and discriminator model, for updating the generator\n",
"def define_gan(generator, discriminator):\n",
"\t# make weights in the discriminator not trainable\n",
"\tdiscriminator.trainable = False\n",
"\t# connect them\n",
"\tmodel = Sequential()\n",
"\t# add generator\n",
"\tmodel.add(generator)\n",
"\t# add the discriminator\n",
"\tmodel.add(discriminator)\n",
"\t# compile model\n",
"\tmodel.compile(loss='binary_crossentropy', optimizer='adam')\n",
"\treturn model\n",
"\n",
"# generate n real samples with class labels\n",
"def generate_real_samples(n):\n",
"\t# generate inputs in [-0.5, 0.5]\n",
"\tX1 = rand(n) - 0.5\n",
"\t# generate outputs X^2\n",
"\tX2 = X1 * X1\n",
"\t# stack arrays\n",
"\tX1 = X1.reshape(n, 1)\n",
"\tX2 = X2.reshape(n, 1)\n",
"\tX = hstack((X1, X2))\n",
"\t# generate class labels\n",
"\ty = ones((n, 1))\n",
"\treturn X, y\n",
"\n",
"# generate points in latent space as input for the generator\n",
"def generate_latent_points(latent_dim, n):\n",
"\t# generate points in the latent space\n",
"\tx_input = randn(latent_dim * n)\n",
"\t# reshape into a batch of inputs for the network\n",
"\tx_input = x_input.reshape(n, latent_dim)\n",
"\treturn x_input\n",
"\n",
"# use the generator to generate n fake examples, with class labels\n",
"def generate_fake_samples(generator, latent_dim, n):\n",
"\t# generate points in latent space\n",
"\tx_input = generate_latent_points(latent_dim, n)\n",
"\t# predict outputs\n",
"\tX = generator.predict(x_input)\n",
"\t# create class labels\n",
"\ty = zeros((n, 1))\n",
"\treturn X, y\n",
"\n",
"# evaluate the discriminator and plot real and fake points\n",
"def summarize_performance(epoch, generator, discriminator, latent_dim, n=100):\n",
"\t# prepare real samples\n",
"\tx_real, y_real = generate_real_samples(n)\n",
"\t# evaluate discriminator on real examples\n",
"\t_, acc_real = discriminator.evaluate(x_real, y_real, verbose=0)\n",
"\t# prepare fake examples\n",
"\tx_fake, y_fake = generate_fake_samples(generator, latent_dim, n)\n",
"\t# evaluate discriminator on fake examples\n",
"\t_, acc_fake = discriminator.evaluate(x_fake, y_fake, verbose=0)\n",
"\t# summarize discriminator performance\n",
"\tprint(epoch, acc_real, acc_fake)\n",
"\t# scatter plot real and fake data points\n",
"\tpyplot.scatter(x_real[:, 0], x_real[:, 1], color='red')\n",
"\tpyplot.scatter(x_fake[:, 0], x_fake[:, 1], color='blue')\n",
"\tpyplot.show()\n",
"\n",
"# train the generator and discriminator\n",
"def train(g_model, d_model, gan_model, latent_dim, n_epochs=10000, n_batch=128, n_eval=2000):\n",
"\t# determine half the size of one batch, for updating the discriminator\n",
"\thalf_batch = int(n_batch / 2)\n",
"\t# manually enumerate epochs\n",
"\tfor i in range(n_epochs):\n",
"\t\t# prepare real samples\n",
"\t\tx_real, y_real = generate_real_samples(half_batch)\n",
"\t\t# prepare fake examples\n",
"\t\tx_fake, y_fake = generate_fake_samples(g_model, latent_dim, half_batch)\n",
"\t\t# update discriminator\n",
"\t\td_model.train_on_batch(x_real, y_real)\n",
"\t\td_model.train_on_batch(x_fake, y_fake)\n",
"\t\t# prepare points in latent space as input for the generator\n",
"\t\tx_gan = generate_latent_points(latent_dim, n_batch)\n",
"\t\t# create inverted labels for the fake samples\n",
"\t\ty_gan = ones((n_batch, 1))\n",
"\t\t# update the generator via the discriminator's error\n",
"\t\tgan_model.train_on_batch(x_gan, y_gan)\n",
"\t\t# evaluate the model every n_eval epochs\n",
"\t\tif (i+1) % n_eval == 0:\n",
"\t\t\tsummarize_performance(i, g_model, d_model, latent_dim)\n",
"\n",
"# size of the latent space\n",
"latent_dim = 5\n",
"# create the discriminator\n",
"discriminator = define_discriminator()\n",
"# create the generator\n",
"generator = define_generator(latent_dim)\n",
"# create the gan\n",
"gan_model = define_gan(generator, discriminator)\n",
"# train model\n",
"train(generator, discriminator, gan_model, latent_dim)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "p310",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
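As a sanity check once the training cell in GAN-01.ipynb finishes, one might sample from the trained generator and compare against the target curve y = x^2. This is a minimal sketch, not part of the committed notebook; it reuses the generator, generate_latent_points, and latent_dim names defined in the cell above.

from matplotlib import pyplot

# Sample points from the trained generator; each row approximates an (x, x^2) pair
# (assumes generator, generate_latent_points, and latent_dim from the cell above)
latent_points = generate_latent_points(latent_dim, 100)
generated = generator.predict(latent_points)
pyplot.scatter(generated[:, 0], generated[:, 1], color='blue', label='generated')

# Overlay the real function y = x^2 on [-0.5, 0.5] for comparison
x_grid = [i / 50 - 0.5 for i in range(51)]
pyplot.plot(x_grid, [v * v for v in x_grid], color='red', label='y = x^2')
pyplot.legend()
pyplot.show()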