Commit

aasd

DanyeongLee committed Aug 30, 2022
1 parent 5dd8c89 commit e01e05f
Showing 15 changed files with 1,889 additions and 477 deletions.
87 changes: 87 additions & 0 deletions Untitled.ipynb
@@ -0,0 +1,87 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"df = pd.read_csv(\"submission.csv\")\n"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQYklEQVR4nO3dbYylZX3H8e+vIElVUqg7Ik/jUkNpVyOUThct1UCtdnchUhvSsjVqLc2olUaTvnBrE9ukb2ga28Zi3W6VoIkF2ypKy4IQ+4BEUGfJAouIrnSVdQm7SAsqJmbx3xdz1pzOnsOcOffMnN1rv5/kZO6H69zXfy7O/rjnPue+TqoKSVK7fmLSBUiSVpZBL0mNM+glqXEGvSQ1zqCXpMYdP+kCBlmzZk2tXbt20mVI0lFjx44dj1fV1KB9R2TQr127lrm5uUmXIUlHjSTfHLbPSzeS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktS4I/LOWC2/tVtu/vHynqsvmWAlklabZ/SS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJatyik5oluRa4FNhfVS/rbfsEcE6vyUnA/1bVeQOeuwf4LvAMcLCqZpalaknSyEaZvfI64BrgY4c2VNVvH1pO8n7gyWd5/sVV9fi4BUqSulk06KvqjiRrB+1LEuC3gF9d5rokScuk6zX6VwGPVdXXh+wv4LYkO5LMPtuBkswmmUsyd+DAgY5lSZIO6Rr0m4Hrn2X/hVV1PrAReGeSVw9rWFXbqmqmqmampqY6liVJOmTsoE9yPPCbwCeGtamqfb2f+4EbgfXj9idJGk+XM/pfA75aVXsH7UzyvCQnHloGXgfs6tCfJGkMiwZ9kuuBu4BzkuxNcmVv1xUsuGyT5LQk23urpwB3JrkX+BJwc1XdunylS5JGMcqnbjYP2f67A7btAzb1lh8Gzu1YnySpo1E+R68jxNotN/94ec/Vl0ywEklHE6dAkKTGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxnln7BGu/27YlXiud9tK7fOMXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxo3ynbHXJtmfZFfftj9L8u0kO3uPTUOeuyHJQ0l2J9mynIVLkkYzyhn9dcCGAdv/uqrO6z22L9yZ5Djgg8BGYB2wOcm6LsVKkpZu0aCvqjuAJ8Y49npgd1U9XFU/BG4ALhvjOJKkDrpco78qyX29SzsnD9h/OvBI3/re3raBkswmmUsyd+DAgQ5lSZL6jRv0HwJeApwHPAq8f0CbDNhWww5YVduqaqaqZqampsYsS5K00FhBX1WPVdUzVfUj4B+Yv0yz0F7gzL71M4B94/QnSRrfWEGf5NS+1TcAuwY0+zJwdpKzkpwAXAHcNE5/kqTxLTpNcZLrgYuANUn2An8KXJTkPOYvxewB3tZrexrw4araVFUHk1wFfBY4Dri2qh5YiV9CkjTcokFfVZsHbP7IkLb7gE1969uBwz56KUlaPd4ZK0mNM+glqXEGvSQ1zqCXpMYZ9JLUuEU/daP2rN1y89ht9lx9ycA2/dslHVk8o5ekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUuFQN/b7uiZmZmam5ublJl7Hshk0ZMMqUBKNaqeOOW4Ok1ZFkR1XNDNrnGb0kNW7RoE9ybZL9SXb1bfvLJF9Ncl+SG5OcNOS5e5Lcn2RnkvZO0SXpKDDKGf11wIYF224HXlZVLwe+Bvzxszz/4qo6b9ifFJKklbVo0FfVHcATC7bdVlUHe6t3A2esQG2SpGWwHNfofw+4Zci+Am5LsiPJ7DL0JUlaok5fPJLkT4CDwMeHNLmwqvYleSFwe5Kv9v5CGHSsWWAWYHp6uktZkqQ+Y5/RJ3kLcCnwxhryGc2q2tf7uR+4EVg/7HhVta2qZqpqZmpqatyyJEkLjBX0STYA7wFeX1VPD2nzvCQnHloGXgfsGtRWkrRyRvl45fXAXcA5SfYmuRK4BjiR+csxO5Ns7bU9Lcn23lNPAe5Mci/wJeDmqrp1RX4LSdJQi16jr6rNAzZ/ZEjbfcCm3vLDwLmdqpMkddbpzViNb1LTE0g69jgFgiQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGOQVCB6NMY7Dn6ktWoRJJGs4zeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGjfKl4Nfm2R/kl192346ye1Jvt77efKQ525I8lCS3Um2LGfhkqTRjHJGfx2wYcG2LcDnqups4HO99f8nyXHAB4GNwDpgc5J1naqVJC3ZokFfVXcATyzYfBnw0d7yR4HfGPDU9cDuqnq4qn4I3NB7niRpFY07BcIpVfUoQFU9muSFA9qcDjzSt74XuGDYAZPMArMA09PTY5alUaZlmFQNTgchTcZKvhmbAdtqWOOq2lZVM1U1MzU1tYJlSdKxZdygfyzJqQC9n/sHtNkLnNm3fgawb8z+JEljGjfobwLe0lt+C/CZAW2+DJyd5KwkJwBX9J4nSVpFo3y88nrgLuCcJHuTXAlcDbw2ydeB1/bWSXJaku0AVXUQuAr4LPAg8E9V9cDK/BqSpGEWfTO2qjYP2fWaAW33AZv61rcD28euTpLUmXfGSlLjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIat+gXj6ibtVtunnQJko5xntFLUuPGDvok5yTZ2fd4Ksm7F7S5KMmTfW3e17liSdKSjH3ppqoeAs4DSHIc8G3gxgFNP19Vl47bjySpm+W6dPMa4BtV9c1lOp4kaZksV9BfAVw/ZN8rk9yb5JYkLx12gCSzSeaSzB04cGCZypIkdQ76JCcArwf+ecDue4AXV9W5wN8Cnx52nKraVlUzVTUzNTXVtSxJUs9ynNFvBO6pqscW7qiqp6rqe73l7cBzkqxZhj4lSSNajqDfzJDLNklelCS95fW9/r6zDH1KkkbU6YapJM8FXgu8rW/b2wGqaitwOfCOJAeBHwBXVFV16VOStDSdgr6qngZesGDb1r7la4BruvQhSerGO2MlqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc4vBx9B/xd877n6kglW0r5hX6be8rj7+tJK84xekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGdQr6JHuS3J9kZ5K5AfuT5ANJdie5L8n5XfqTJC3dctwwdXFVPT5k30bg7N7jAuBDvZ+SpFWy0pduLgM+VvPuBk5KcuoK9ylJ6tP1jL6A25IU8PdVtW3B/tOBR/rW9/a2PbrwQElmgVmA6enpjmXpSLQa0xs4nYB0uK5n9BdW1fnMX6J5Z5JXL
9ifAc+pQQeqqm1VNVNVM1NTUx3LkiQd0inoq2pf7+d+4EZg/YIme4Ez+9bPAPZ16VOStDRjB32S5yU58dAy8Dpg14JmNwFv7n365hXAk1V12GUbSdLK6XKN/hTgxiSHjvOPVXVrkrcDVNVWYDuwCdgNPA28tVu5kqSlGjvoq+ph4NwB27f2LRfwznH7kCR1552xktQ4g16SGmfQS1LjDHpJapxBL0mNW45JzZoxyu3zw27j1/hGGdOFbZzeQBqdZ/SS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGucUCEM41cHRb6lTWvS3GeW5XfpdzeNMug9Nnmf0ktS4Ll8OfmaS/0jyYJIHkrxrQJuLkjyZZGfv8b5u5UqSlqrLpZuDwB9V1T1JTgR2JLm9qr6yoN3nq+rSDv1IkjoY+4y+qh6tqnt6y98FHgROX67CJEnLY1mu0SdZC/wC8MUBu1+Z5N4ktyR56XL0J0kaXedP3SR5PvBJ4N1V9dSC3fcAL66q7yXZBHwaOHvIcWaBWYDp6emuZUmSejqd0Sd5DvMh//Gq+tTC/VX1VFV9r7e8HXhOkjWDjlVV26pqpqpmpqamupQlSerT5VM3AT4CPFhVfzWkzYt67Uiyvtffd8btU5K0dF0u3VwIvAm4P8nO3rb3AtMAVbUVuBx4R5KDwA+AK6qqOvQpSVqisYO+qu4Eskiba4Brxu1DktRdc1MgLPW2dx2dlvrfcJT2XV4XK1FPV5N6nS/X9BGjcNqG0TgFgiQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGNTcFwjBOe6BxLddrZyWmYVjO13WX6UNGmYrgaPo32OX3HHacUcZ0paZ08Ixekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGdQr6JBuSPJRkd5ItA/YnyQd6++9Lcn6X/iRJSzd20Cc5DvggsBFYB2xOsm5Bs43A2b3HLPChcfuTJI2nyxn9emB3VT1cVT8EbgAuW9DmMuBjNe9u4KQkp3boU5K0RKmq8Z6YXA5sqKrf762/Cbigqq7qa/NvwNVVdWdv/XPAe6pqbsDxZpk/6wc4B3horMLasQZ4fNJFHGEck8M5Joc7VsfkxVU1NWhHl7luMmDbwv9rjNJmfmPVNmBbh3qakmSuqmYmXceRxDE5nGNyOMfkcF0u3ewFzuxbPwPYN0YbSdIK6hL0XwbOTnJWkhOAK4CbFrS5CXhz79M3rwCerKpHO/QpSVqisS/dVNXBJFcBnwWOA66tqgeSvL23fyuwHdgE7AaeBt7aveRjhpexDueYHM4xOZxjssDYb8ZKko4O3hkrSY0z6CWpcQb9hI0wjcQbe9NH3JfkC0nOnUSdq2mxMelr90tJnund09G0UcYkyUVJdiZ5IMl/rXaNq22Efzs/leRfk9zbG5Nj9z3CqvIxoQfzb2J/A/gZ4ATgXmDdgja/DJzcW94IfHHSdU96TPra/Tvzb/hfPum6Jz0mwEnAV4Dp3voLJ133ETAm7wX+orc8BTwBnDDp2ifx8Ix+shadRqKqvlBV/9NbvZv5exFaNsrUGgB/CHwS2L+axU3IKGPyO8CnqupbAFXV+riMMiYFnJgkwPOZD/qDq1vmkcGgn6zTgUf61vf2tg1zJXDLilY0eYuOSZLTgTcAW1exrkka5XXys8DJSf4zyY4kb1616iZjlDG5Bvh55m/SvB94V1X9aHXKO7J0mQJB3Y08RUSSi5kP+l9Z0Yomb5Qx+Rvm50x6Zv5krXmjjMnxwC8CrwF+Ergryd1V9bWVLm5CRhmTXwd2Ar8KvAS4Pcnnq+qpFa7tiGPQT9ZIU0QkeTnwYWBjVX1nlWqblFHGZAa4oRfya4BNSQ5W1adXpcLVN+p0I49X1feB7ye5AzgXaDXoRxmTtzI/qWIBu5P8N/BzwJdWp8Qjh5duJmvRaSSSTAOfAt7U8NlZv0XHpKrOqqq1VbUW+BfgDxoOeRhtupHPAK9KcnyS5wIXAA+ucp2raZQx+Rbzf+GQ5BTmZ8V9eFWrPEJ4Rj9BNdo0Eu8DXgD8Xe8M9mA1PDPfiGNyTBllTKrqwSS3AvcBPwI+XFW7Jlf1yhrxdfLnwHVJ7mf+Us97qupYnL7YKRAkqXVeupGkxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXH/BzARxz487+nPAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"plt.hist(df[\"Reorg_g\"], bins=100)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0.93596935"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "mol",
"language": "python",
"name": "mol"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
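
The notebook above only loads submission.csv and plots the distribution of the Reorg_g column. As a standalone script, the same inspection plus a quick numeric summary would look roughly like this (file and column names are taken from the notebook; the describe/NaN checks are illustrative additions):

import pandas as pd
import matplotlib.pyplot as plt

# Load the generated submission file.
df = pd.read_csv("submission.csv")

# Quick sanity check of the predicted Reorg_g values.
print(df["Reorg_g"].describe())
print("NaN count:", df["Reorg_g"].isna().sum())

# Same plot as in the notebook: histogram with 100 bins.
plt.hist(df["Reorg_g"], bins=100)
plt.xlabel("Reorg_g")
plt.ylabel("count")
plt.show()
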
56 changes: 56 additions & 0 deletions configs/experiment/gem1-gc1-embed128-4layers-adam-noH.yaml
@@ -0,0 +1,56 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=example

defaults:
- override /datamodule: datamodule2d.yaml
- override /model: gem1.yaml
- override /callbacks: default.yaml
- override /trainer: default.yaml
- override /logger: wandb.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["GEM1", "Gradient Clipping"]

name: "gem1-gc1-embed128-4layers-adam-noH"

seed: 42

trainer:
max_epochs: 100
gradient_clip_val: 1.

model:
lr: 1e-3
weight_decay: 1e-5
max_epochs: ${trainer.max_epochs}
optimizer: "adam"
encoder:
_target_: src.models.components.gem1.GEM1
embed_dim: 128
dropout: 0.1
last_act: True
n_layers: 4
pool: "mean"

classifier:
_target_: src.models.components.gem1.Classifier
input_dim: ${model.encoder.embed_dim}
hidden_dims: [256, 256]
batch_norm: True
dropout: 0.5

datamodule:
batch_size: 64
fold: 0
removeHs: True

logger:
wandb:
tags: ${tags}
group: ${name}
name: ${name}_fold${datamodule.fold}
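
Each experiment file relies on Hydra's _target_ convention: the encoder, classifier, and (in some configs) datamodule entries give a fully-qualified class path plus constructor keyword arguments. A minimal sketch of how such a block is resolved, assuming the project uses the standard hydra.utils.instantiate mechanism (the class path and arguments mirror the encoder block above; the hand-built config fragment is illustrative):

from omegaconf import OmegaConf
from hydra.utils import instantiate

# Fragment mirroring the "encoder" block of the experiment config above.
encoder_cfg = OmegaConf.create({
    "_target_": "src.models.components.gem1.GEM1",
    "embed_dim": 128,
    "dropout": 0.1,
    "last_act": True,
    "n_layers": 4,
    "pool": "mean",
})

# instantiate() imports src.models.components.gem1.GEM1 and calls it with the
# remaining keys as keyword arguments, i.e. GEM1(embed_dim=128, dropout=0.1, ...).
encoder = instantiate(encoder_cfg)
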

58 changes: 58 additions & 0 deletions configs/experiment/gem1-gc1-embed128-4layers-adam-onehot-epochs80-gem2data.yaml
@@ -0,0 +1,58 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=example

defaults:
- override /datamodule: datamodule2d.yaml
- override /model: gem1.yaml
- override /callbacks: default.yaml
- override /trainer: default.yaml
- override /logger: wandb.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["GEM1", "Gradient Clipping"]

name: "gem1-gc1-embed128-4layers-adam-onehot-epochs80-gem2data"

seed: 42

trainer:
max_epochs: 80
gradient_clip_val: 1.

model:
lr: 1e-3
weight_decay: 1e-5
max_epochs: ${trainer.max_epochs}
optimizer: "adam"
encoder:
_target_: src.models.components.gem1.GEM1_gem2data
embed_dim: 128
dropout: 0.1
last_act: True
n_layers: 4
pool: "mean"

classifier:
_target_: src.models.components.gem1.Classifier
input_dim: ${model.encoder.embed_dim}
hidden_dims: [256, 256]
batch_norm: True
dropout: 0.5

datamodule:
_target_: src.datamodules.datamodule_gem2.BaseDataModule
batch_size: 64
fold: 0
onehot: True
num_workers: 8

logger:
wandb:
tags: ${tags}
group: ${name}
name: ${name}_fold${datamodule.fold}

57 changes: 57 additions & 0 deletions configs/experiment/gem1-gc1-embed128-4layers-adam-onehot-epochs80.yaml
@@ -0,0 +1,57 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=example

defaults:
- override /datamodule: datamodule2d.yaml
- override /model: gem1.yaml
- override /callbacks: default.yaml
- override /trainer: default.yaml
- override /logger: wandb.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["GEM1", "Gradient Clipping"]

name: "gem1-gc1-embed128-4layers-adam-onehot-epochs80"

seed: 42

trainer:
max_epochs: 80
gradient_clip_val: 1.

model:
lr: 1e-3
weight_decay: 1e-5
max_epochs: ${trainer.max_epochs}
optimizer: "adam"
encoder:
_target_: src.models.components.gem1.GEM1_onehot
embed_dim: 128
dropout: 0.1
last_act: True
n_layers: 4
pool: "mean"

classifier:
_target_: src.models.components.gem1.Classifier
input_dim: ${model.encoder.embed_dim}
hidden_dims: [256, 256]
batch_norm: True
dropout: 0.5

datamodule:
batch_size: 64
fold: 0
onehot: True
num_workers: 8

logger:
wandb:
tags: ${tags}
group: ${name}
name: ${name}_fold${datamodule.fold}

56 changes: 56 additions & 0 deletions configs/experiment/gem1-gc1-embed128-4layers-adam-onehot.yaml
@@ -0,0 +1,56 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=example

defaults:
- override /datamodule: datamodule2d.yaml
- override /model: gem1.yaml
- override /callbacks: default.yaml
- override /trainer: default.yaml
- override /logger: wandb.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["GEM1", "Gradient Clipping"]

name: "gem1-gc1-embed128-4layers-adam-onehot"

seed: 42

trainer:
max_epochs: 100
gradient_clip_val: 1.

model:
lr: 1e-3
weight_decay: 1e-5
max_epochs: ${trainer.max_epochs}
optimizer: "adam"
encoder:
_target_: src.models.components.gem1.GEM1_onehot
embed_dim: 128
dropout: 0.1
last_act: True
n_layers: 4
pool: "mean"

classifier:
_target_: src.models.components.gem1.Classifier
input_dim: ${model.encoder.embed_dim}
hidden_dims: [256, 256]
batch_norm: True
dropout: 0.5

datamodule:
batch_size: 64
fold: 0
onehot: True

logger:
wandb:
tags: ${tags}
group: ${name}
name: ${name}_fold${datamodule.fold}
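
With a standard Hydra entry point (the "# python train.py experiment=example" comment in each file suggests this layout), the experiment above would be launched as

python train.py experiment=gem1-gc1-embed128-4layers-adam-onehot

and a cross-validation sweep over folds could use Hydra multirun, e.g.

python train.py -m experiment=gem1-gc1-embed128-4layers-adam-onehot datamodule.fold=0,1,2,3,4

(the available fold indices are an assumption; only fold 0 is pinned in the config). The name: ${name}_fold${datamodule.fold} interpolation then gives each fold its own W&B run name inside the shared group.
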

55 changes: 55 additions & 0 deletions configs/experiment/gem1-opt.yaml
@@ -0,0 +1,55 @@
# @package _global_

# to execute this experiment run:
# python train.py experiment=example

defaults:
- override /datamodule: datamodule2d.yaml
- override /model: gem1.yaml
- override /callbacks: default.yaml
- override /trainer: default.yaml
- override /logger: wandb.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["GEM1", "Gradient Clipping"]

name: "gem1-opt"

seed: 42

trainer:
max_epochs: 120
gradient_clip_val: 1.

model:
lr: 7e-3
weight_decay: 1e-3
max_epochs: ${trainer.max_epochs}
optimizer: "adamw"
encoder:
_target_: src.models.components.gem1.GEM1
embed_dim: 128
dropout: 0.25
last_act: True
n_layers: 5
pool: "mean"

classifier:
_target_: src.models.components.gem1.Classifier
input_dim: ${model.encoder.embed_dim}
hidden_dims: [1024, 256]
batch_norm: True
dropout: 0.4

datamodule:
batch_size: 128
fold: 0

logger:
wandb:
tags: ${tags}
group: ${name}
name: ${name}_fold${datamodule.fold}
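
The optimizer, lr, and weight_decay fields in these configs are consumed by the model rather than by Hydra itself. A hedged sketch of how a LightningModule's configure_optimizers might map them onto PyTorch optimizers (the real module lives in src/models and is not part of this commit; the helper below is illustrative):

import torch

def build_optimizer(params, name: str, lr: float, weight_decay: float):
    # "adam" is used by the gem1-gc1-* experiments, "adamw" by gem1-opt.
    if name == "adam":
        return torch.optim.Adam(params, lr=lr, weight_decay=weight_decay)
    if name == "adamw":
        return torch.optim.AdamW(params, lr=lr, weight_decay=weight_decay)
    raise ValueError(f"Unknown optimizer: {name}")

# Example with the gem1-opt settings: build_optimizer(model.parameters(), "adamw", 7e-3, 1e-3)
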

File renamed without changes.