Port frontend to Gatsby site (#43)
* add gatsby starter

* port some ui components and utils

* add events, sitemetadata, migrate Container from old site

* add source-filesystem and transformer-csv plugin, try to query

* add moment and shortid packages

* copy old components to new gatsby site folder, to be refactored

* get EventsContainer working

* add sub-components to EventItem component, migrating

* add untested code to description and time boxes

* making event item component appear

* fix incorrect destructuring eventData

* fixing starttime and endtime format

* Update date parsing function in ipynb folder

* fix some cases parsing wrongly

* generate csv for starttime endtime as dt

* update events.csv, fix getTime helper function

* add fa icon

* add owner label, add color generator

* add indicator for description expansion

* filter events with no title out

* adjust container size for responsive

* fix date header spacing of events group

* add navbar and put logo on it, TODO, fix menu in navbar

* extract navbar as a component, fix items on it using flex

* done porting to Gatsby, should merge for now

* remove frontend folder

* fix list has no key

* update readme to include gatsby
bluenex authored and titipata committed Sep 16, 2019
1 parent 8ceec38 commit 1699f2c
Showing 60 changed files with 17,208 additions and 20,843 deletions.
24 changes: 14 additions & 10 deletions README.md
@@ -1,23 +1,27 @@
 # Events at University of Pennsylvania
 
-All events happening with event recommendations at University of Pennsylvania
-in an easy-to-use UI.
+All events happening with event recommendations at University of Pennsylvania in an easy-to-use UI.
 
 ## Run web server locally
 
-First, make sure that you have all Python backend as mentioned in
-`backend/requirements.txt` and recent version of `npm` installed.
+First, make sure that you have all Python backend as mentioned in `backend/requirements.txt` and recent version of `npm` installed (NodeJS 8+).
 
-You can run the web-application from the `frontend` folder by the following
+In order to run the web-application, if it is the first time, you need to install `gatsby-cli`:
 
 ```sh
-cd fronend
+# only once on first time
+npm install -g gatsby-cli
+```
+
+and run from the `gatsby_site` folder as follows:
+
+```sh
+cd gatsby_site
 npm install
-npm start
+gatsby develop
 ```
 
-This will run Python backend (using Flask) on port 5001 and ReactJS frontend on port 3000.
-You will see the demo looks something like the following
+This will run Gatsby site on port `8000`. You will see the demo looks something like the following:
 
 <img src="demo.png" width="400" />
 
@@ -31,5 +35,5 @@ You will see the demo looks something like the following
 
 ## Contributions
 
-We are very welcome to all contribution. If you spot any errors, please
+We are very welcome to all contribution. If you spot any errors, please
 feel free to report in the issue folder.
1,075 changes: 1,075 additions & 0 deletions backend/data/events.csv

Large diffs are not rendered by default.

1,075 changes: 1,075 additions & 0 deletions backend/data/events_dt.csv

Large diffs are not rendered by default.

208 changes: 208 additions & 0 deletions backend/ipynb/regex_startend_time.ipynb
@@ -0,0 +1,208 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import re\n",
"from dateutil import parser\n",
"from datetime import datetime, timedelta"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"with open('../data/events.csv') as f:\n",
" data = pd.read_csv(f)\n",
" starttime = data['starttime']\n",
" endtime = data['endtime']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def sub_pattern(time_list, compiled_pats, is_endtime=False):\n",
" collector = list()\n",
" \n",
" for st in time_list:\n",
" # if time is not null\n",
" if not pd.isnull(st) and st != '':\n",
" # some time contains '^ ' so it doesn't match properly\n",
" st = st.strip()\n",
" # pass regex patterns\n",
" check = list(map(lambda x: x.sub(r'\\1', st) if x.match(st) else None, pats))\n",
" if any(check):\n",
" # if match any pattern, append to collector\n",
" subbed = next(item for item in check if item is not None)\n",
" collector.append(subbed)\n",
" else:\n",
" # if doesnt match, and only have one pattern\n",
" # meaning it is endtime in starttime pattern\n",
" # so put None to all except the match one\n",
" if len(compiled_pats) == 1:\n",
" collector.append(None)\n",
" else:\n",
" # if doesnt match any pattern assume this is allday\n",
" # e.g., '(All day)', 'All day', 'May 21, 2013' falls here\n",
" collector.append('allday')\n",
" # if time is null\n",
" else:\n",
" collector.append(None)\n",
"# # if not endtime\n",
"# if not is_endtime:\n",
"# # and it is endtime in starttime pattern\n",
"# if len(compiled_pats) == 1:\n",
"# # put None\n",
"# collector.append(None)\n",
"# # it is a starttime, assume it is allday\n",
"# else:\n",
"# collector.append('allday')\n",
"# else:\n",
"# collector.append(None)\n",
" \n",
" return collector\n",
"\n",
"# TODO\n",
"def cleansing(ts_te_tuple):\n",
" if not all(ts_te_tuple):\n",
" return(ts_te_tuple)\n",
" \n",
" artifacts_removed = tuple(map(lambda x: x.replace(' ', '').replace('.', '').lower(), ts_te_tuple))\n",
" dt_tuple = tuple(parser.parse(elem).time() if elem != 'allday' else elem for elem in artifacts_removed)\n",
" return dt_tuple"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# pattern for starttime and endtime \n",
"patterns = [\n",
" r'^[a-zA-Z]+, [a-zA-Z]+ [0-9]{1,2}, [0-9]{4} - (.*)',\n",
" r'^([0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4}).*',\n",
" r'^([0-9]{1,2}\\s?[apmAPM\\.]{1,4}).*',\n",
"]\n",
"\n",
"# compile to put as argument\n",
"pats = [re.compile(pat) for pat in patterns]\n",
"\n",
"starttime_subbed = sub_pattern(starttime, compiled_pats=pats)\n",
"endtime_subbed = sub_pattern(endtime, compiled_pats=pats, is_endtime=True)\n",
"\n",
"# pattern for extracting endtime out of starttime\n",
"patterns = [\n",
" r'^[0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4}\\s?-\\s?([0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4})'\n",
"]\n",
"\n",
"# compile to put as argument\n",
"pats = [re.compile(pat) for pat in patterns]\n",
"\n",
"endtime_in_starttime_subbed = sub_pattern(starttime, compiled_pats=pats)\n",
"\n",
"time_showed = []\n",
"for ts, te, k in zip(starttime_subbed, endtime_subbed, endtime_in_starttime_subbed):\n",
" # merge k into te first\n",
" if pd.isnull(te) and not pd.isnull(k):\n",
" time_showed.append((ts, k, k))\n",
" # +1 hr to all null te, except ts is allday\n",
" elif not pd.isnull(ts) and ts is not 'allday' and pd.isnull(te):\n",
" te = (parser.parse(ts) + timedelta(hours=1)).strftime(\"%I:%M %p\")\n",
" time_showed.append((ts, te, k))\n",
" # fill allday to te\n",
" elif ts == 'allday' and pd.isnull(te):\n",
" te = 'allday'\n",
" time_showed.append((ts, te, k))\n",
" # fill None\n",
" elif pd.isnull(ts) and pd.isnull(te):\n",
" time_showed.append((None, None, k))\n",
" # nothing to do\n",
" else:\n",
" time_showed.append((ts, te, k))\n",
" \n",
"# keep only ts and te\n",
"time_showed = [(ts, te) for (ts, te, k) in time_showed]\n",
" \n",
"# TODO\n",
"# if endtime is nan but starttime is not, endtime = starttime + 1 hour\n",
"# if endtime is nan but starttime is All day, endtime = All day\n",
"# if endtime is nan and starttime is nan, endtime = null, starttime = null\n",
"# if endtime is nan but starttime is XX:XX - YY:YY, endtime = YY:YY //"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"list(zip(time_showed, starttime_subbed, endtime_subbed))\n",
"time_showed[10:20]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# cleaning\n",
"final = [cleansing(x) for x in time_showed]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"final_starttime = [ts for (ts, te) in final]\n",
"final_endtime = [te for (ts, te) in final]\n",
"final_endtime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"new_df = data.assign(starttime_dt=final_starttime, endtime_dt=final_endtime)\n",
"new_df.to_csv('../data/events_dt.csv', index=False)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
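For orientation, here is a minimal sketch of how the starttime patterns defined in `regex_startend_time.ipynb` behave. The sample strings are invented for illustration (they are not taken from `events.csv`), and the loop mirrors `sub_pattern`'s first-match-wins logic plus the normalisation done by `cleansing`:

```python
# Illustrative only: exercises the three starttime patterns from the notebook
# on made-up sample strings.
import re
from dateutil import parser

patterns = [
    r'^[a-zA-Z]+, [a-zA-Z]+ [0-9]{1,2}, [0-9]{4} - (.*)',
    r'^([0-9]{1,2}:[0-9]{1,2}\s?[apmAPM\.]{0,4}).*',
    r'^([0-9]{1,2}\s?[apmAPM\.]{1,4}).*',
]
pats = [re.compile(pat) for pat in patterns]

samples = ['10:00 AM - 11:00 AM', '3pm', 'Tuesday, May 21, 2013 - 4:30pm', '(All day)']
for s in samples:
    # take the first pattern that matches; no match is treated as an all-day event
    hits = [p.sub(r'\1', s) for p in pats if p.match(s)]
    extracted = hits[0] if hits else 'allday'
    print(s, '->', extracted)
    if extracted != 'allday':
        # cleansing() then strips spaces/dots, lowercases, and parses to a time object
        print('   parsed:', parser.parse(extracted.replace(' ', '').replace('.', '').lower()).time())
```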
18 changes: 0 additions & 18 deletions frontend/.editorconfig

This file was deleted.

18 changes: 0 additions & 18 deletions frontend/.eslintrc.json

This file was deleted.

8 changes: 0 additions & 8 deletions frontend/README.md

This file was deleted.
