* add gatsby starter
* port some ui component and utils
* add events, sitemetadata, migrate Container from old site
* add source-filesystem and transformer-csv plugin, try to query
* add moment and shortid packages
* copy old components to new gatsby site folder, to be refactored
* get EventsContainer working
* add sub-components to EventItem component, migrating
* add untested code to description and time boxes
* making event item component appear
* fix incorrect destructuring eventData
* fixing starttime and endtime format
* Update date parsing function in ipynb folder
* fix some cases parsing wrongly
* generate csv for starttime endtime as dt
* update events.csv, fix getTime helper function
* add fa icon
* add owner label, add color generator
* add indicator for description expansion
* filter events with no title out
* adjust container size for responsive
* fix date header spacing of events group
* add navbar and put logo on it, TODO, fix menu in navbar
* extract navbar as a component, fix items on it using flex
* done porting to Gatsby, should merge for now
* remove frontend folder
* fix list has no key
* update readme to include gatsby
Showing 60 changed files with 17,208 additions and 20,843 deletions.
@@ -0,0 +1,208 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"import re\n",
"from dateutil import parser\n",
"from datetime import datetime, timedelta"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"with open('../data/events.csv') as f:\n",
"    data = pd.read_csv(f)\n",
"    starttime = data['starttime']\n",
"    endtime = data['endtime']"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def sub_pattern(time_list, compiled_pats, is_endtime=False):\n",
"    collector = list()\n",
"\n",
"    for st in time_list:\n",
"        # if time is not null\n",
"        if not pd.isnull(st) and st != '':\n",
"            # some times contain a leading space, so they don't match properly\n",
"            st = st.strip()\n",
"            # run every compiled regex pattern over the string\n",
"            check = list(map(lambda x: x.sub(r'\\1', st) if x.match(st) else None, compiled_pats))\n",
"            if any(check):\n",
"                # if any pattern matched, append the first substitution to collector\n",
"                subbed = next(item for item in check if item is not None)\n",
"                collector.append(subbed)\n",
"            else:\n",
"                # if nothing matched and only one pattern was given,\n",
"                # we are extracting the endtime embedded in starttime,\n",
"                # so record None for this entry\n",
"                if len(compiled_pats) == 1:\n",
"                    collector.append(None)\n",
"                else:\n",
"                    # if nothing matched any pattern, assume this is an all-day event\n",
"                    # e.g., '(All day)', 'All day', 'May 21, 2013' fall here\n",
"                    collector.append('allday')\n",
"        # if time is null\n",
"        else:\n",
"            collector.append(None)\n",
"# # if not endtime\n",
"# if not is_endtime:\n",
"# # and it is endtime in starttime pattern\n",
"# if len(compiled_pats) == 1:\n",
"# # put None\n",
"# collector.append(None)\n",
"# # it is a starttime, assume it is allday\n",
"# else:\n",
"# collector.append('allday')\n",
"# else:\n",
"# collector.append(None)\n",
"\n",
"    return collector\n",
"\n",
"# TODO\n",
"def cleansing(ts_te_tuple):\n",
"    if not all(ts_te_tuple):\n",
"        return ts_te_tuple\n",
"\n",
"    artifacts_removed = tuple(map(lambda x: x.replace(' ', '').replace('.', '').lower(), ts_te_tuple))\n",
"    dt_tuple = tuple(parser.parse(elem).time() if elem != 'allday' else elem for elem in artifacts_removed)\n",
"    return dt_tuple"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# patterns for starttime and endtime\n",
"patterns = [\n",
"    r'^[a-zA-Z]+, [a-zA-Z]+ [0-9]{1,2}, [0-9]{4} - (.*)',\n",
"    r'^([0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4}).*',\n",
"    r'^([0-9]{1,2}\\s?[apmAPM\\.]{1,4}).*',\n",
"]\n",
"\n",
"# compile to pass as argument\n",
"pats = [re.compile(pat) for pat in patterns]\n",
"\n",
"starttime_subbed = sub_pattern(starttime, compiled_pats=pats)\n",
"endtime_subbed = sub_pattern(endtime, compiled_pats=pats, is_endtime=True)\n",
"\n",
"# pattern for extracting endtime out of starttime\n",
"patterns = [\n",
"    r'^[0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4}\\s?-\\s?([0-9]{1,2}:[0-9]{1,2}\\s?[apmAPM\\.]{0,4})'\n",
"]\n",
"\n",
"# compile to pass as argument\n",
"pats = [re.compile(pat) for pat in patterns]\n",
"\n",
"endtime_in_starttime_subbed = sub_pattern(starttime, compiled_pats=pats)\n",
"\n",
"time_showed = []\n",
"for ts, te, k in zip(starttime_subbed, endtime_subbed, endtime_in_starttime_subbed):\n",
"    # merge k into te first\n",
"    if pd.isnull(te) and not pd.isnull(k):\n",
"        time_showed.append((ts, k, k))\n",
"    # add 1 hr to every null te, except when ts is allday\n",
"    elif not pd.isnull(ts) and ts != 'allday' and pd.isnull(te):\n",
"        te = (parser.parse(ts) + timedelta(hours=1)).strftime(\"%I:%M %p\")\n",
"        time_showed.append((ts, te, k))\n",
"    # fill allday into te\n",
"    elif ts == 'allday' and pd.isnull(te):\n",
"        te = 'allday'\n",
"        time_showed.append((ts, te, k))\n",
"    # fill None\n",
"    elif pd.isnull(ts) and pd.isnull(te):\n",
"        time_showed.append((None, None, k))\n",
"    # nothing to do\n",
"    else:\n",
"        time_showed.append((ts, te, k))\n",
"\n",
"# keep only ts and te\n",
"time_showed = [(ts, te) for (ts, te, k) in time_showed]\n",
"\n",
"# TODO\n",
"# if endtime is nan but starttime is not, endtime = starttime + 1 hour\n",
"# if endtime is nan but starttime is All day, endtime = All day\n",
"# if endtime is nan and starttime is nan, endtime = null, starttime = null\n",
"# if endtime is nan but starttime is XX:XX - YY:YY, endtime = YY:YY //"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"list(zip(time_showed, starttime_subbed, endtime_subbed))\n",
"time_showed[10:20]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# cleaning\n",
"final = [cleansing(x) for x in time_showed]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"final_starttime = [ts for (ts, te) in final]\n",
"final_endtime = [te for (ts, te) in final]\n",
"final_endtime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"new_df = data.assign(starttime_dt=final_starttime, endtime_dt=final_endtime)\n",
"new_df.to_csv('../data/events_dt.csv', index=False)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
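The notebook above derives the starttime_dt and endtime_dt columns written to events_dt.csv: each raw time string is matched against a small set of regexes, the captured time-of-day is kept (or the row is treated as all-day), and a missing end time defaults to the start time plus one hour. Below is a minimal standalone sketch of that flow, assuming the pattern list from the notebook; the extract_time helper name and the sample strings are illustrative only and are not taken from events.csv.

import re
from datetime import timedelta
from dateutil import parser

# Patterns from the notebook: capture the time-of-day portion of a raw string.
patterns = [
    r'^[a-zA-Z]+, [a-zA-Z]+ [0-9]{1,2}, [0-9]{4} - (.*)',
    r'^([0-9]{1,2}:[0-9]{1,2}\s?[apmAPM\.]{0,4}).*',
    r'^([0-9]{1,2}\s?[apmAPM\.]{1,4}).*',
]
pats = [re.compile(p) for p in patterns]

def extract_time(raw):
    """Return the captured time text, 'allday' when no pattern matches, or None for empty input."""
    if raw is None or not raw.strip():
        return None
    raw = raw.strip()
    for pat in pats:
        if pat.match(raw):
            return pat.sub(r'\1', raw)
    return 'allday'

# Illustrative samples only (not taken from events.csv).
for s in ['Tuesday, May 21, 2013 - 6:30pm', '10:00 AM', '(All day)']:
    t = extract_time(s)
    if t == 'allday':
        print(s, '->', 'allday')
    else:
        # same normalization as the notebook's cleansing(): strip spaces and dots, then parse
        start = parser.parse(t.replace(' ', '').replace('.', '').lower())
        end = start + timedelta(hours=1)  # notebook default when no endtime is present
        print(s, '->', start.time(), end.time())

The notebook additionally runs a fourth pattern over starttime to pull out an end time embedded as 'XX:XX - YY:YY' before falling back to the one-hour default.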