From a7a0a7ba7b962212b576f2735624cf1418325176 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Sun, 15 Jun 2025 00:56:49 +0200 Subject: [PATCH 01/13] feat: add health checks and Docker setup - Add health check routes (/api/health, /api/ready, /api/metrics) - Add Docker Compose for local development - Add Dockerfile.dev for development builds - Update email service with development mode - Fix middleware order for health endpoints - Update .gitignore and .dockerignore --- .dockerignore | 36 ++++++- .gitignore | 70 ++++++-------- Dockerfile.app | 47 --------- Dockerfile.dev | 32 +++++++ bun.lockb | Bin 89437 -> 72839 bytes docker-compose.yaml | 126 ++++++++++++++++++++++++ index.ts | 111 +++++++++++++++++----- lib/email.ts | 169 ++++++++++++++++++++++++--------- package.json | 2 +- routes/health.ts | 227 ++++++++++++++++++++++++++++++++++++++++++++ types.ts | 36 +++---- 11 files changed, 670 insertions(+), 186 deletions(-) delete mode 100644 Dockerfile.app create mode 100644 Dockerfile.dev create mode 100644 docker-compose.yaml create mode 100644 routes/health.ts diff --git a/.dockerignore b/.dockerignore index d71be85..9187472 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,5 @@ -#.dockerignore - node_modules +npm-debug.log Dockerfile* docker-compose* .dockerignore @@ -9,9 +8,36 @@ docker-compose* README.md LICENSE .vscode +.devcontainer Makefile -helm-charts -.env +helm-charts/ +helm/ +k8s/ +.env* .editorconfig .idea -coverage* \ No newline at end of file +coverage* +*.log +.cache +.DS_Store +Thumbs.db + +# Source files not needed in container +bruno/ +sample_data*.json +sample.env +seeds/ +init-db/ +*.md +fly.toml + +# Development scripts +setup-dataviz.sh +*.sh + +# Test files +test/ +tests/ +__tests__/ +*.test.* +*.spec.* \ No newline at end of file diff --git a/.gitignore b/.gitignore index 222cebc..c07cce3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,105 +1,83 @@ # Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore # Logs - logs -_.log -npm-debug.log_ +*.log +npm-debug.log* yarn-debug.log* yarn-error.log* lerna-debug.log* .pnpm-debug.log* # Caches - .cache # Diagnostic reports (https://nodejs.org/api/report.html) - -report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json # Runtime data - pids -_.pid -_.seed +*.pid +*.seed *.pid.lock # Directory for instrumented libs generated by jscoverage/JSCover - lib-cov # Coverage directory used by tools like istanbul - coverage *.lcov # nyc test coverage - .nyc_output # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) - .grunt # Bower dependency directory (https://bower.io/) - bower_components # node-waf configuration - .lock-wscript # Compiled binary addons (https://nodejs.org/api/addons.html) - build/Release # Dependency directories - node_modules/ jspm_packages/ # Snowpack dependency directory (https://snowpack.dev/) - web_modules/ # TypeScript cache - *.tsbuildinfo # Optional npm cache directory - .npm # Optional eslint cache - .eslintcache # Optional stylelint cache - .stylelintcache # Microbundle cache - .rpt2_cache/ .rts2_cache_cjs/ .rts2_cache_es/ .rts2_cache_umd/ # Optional REPL history - .node_repl_history # Output of 'npm pack' - *.tgz # Yarn Integrity file - .yarn-integrity # dotenv environment variable files - .env .env.development.local .env.test.local @@ -107,61 +85,46 @@ web_modules/ .env.local # parcel-bundler cache (https://parceljs.org/) - .parcel-cache # Next.js build output - .next out # Nuxt.js build / generate output - 
.nuxt dist # Gatsby files - # Comment in the public line in if your project uses Gatsby and not Next.js - # https://nextjs.org/blog/next-9-1#public-directory-support - # public # vuepress build output - .vuepress/dist # vuepress v2.x temp and cache directory - .temp # Docusaurus cache and generated files - .docusaurus # Serverless directories - .serverless/ # FuseBox cache - .fusebox/ # DynamoDB Local files - .dynamodb/ # TernJS port file - .tern-port # Stores VSCode versions used for testing VSCode extensions - .vscode-test # yarn v2 - .yarn/cache .yarn/unplugged .yarn/build-state.yml @@ -175,5 +138,26 @@ dist .DS_Store .vercel - data + +# Development scripts +setup-dataviz.sh +*.sh + +# Database +*.db +*.sqlite +*.sql +prisma/ + +# OS generated files +Thumbs.db + +# Bun specific +bun.lockb.bak +#*.lockb + +# Kubernetes/Helm +helm/ +k8s/ +*.yaml.backup \ No newline at end of file diff --git a/Dockerfile.app b/Dockerfile.app deleted file mode 100644 index 6c421bc..0000000 --- a/Dockerfile.app +++ /dev/null @@ -1,47 +0,0 @@ -# BASE Stage -FROM oven/bun:1 AS base - -# setup all global artifacts. why Node? A: https://github.com/oven-sh/bun/issues/4848 -RUN apt update \ - && apt install -y curl - -ARG NODE_VERSION=20 -RUN curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n \ - && bash n $NODE_VERSION \ - && rm n \ - && npm install -g n - - -# INSTALL Stage - -# install dependencies into temp folder. this will cache them and speed up future builds -FROM base AS install -WORKDIR /temp/prod/ -COPY package.json bun.lockb ./ -RUN bun install --frozen-lockfile --production - - -# PRERELEASE Stage - -# copy node_modules from temp folder. then copy all (non-ignored) project files into the image -FROM install AS prerelease - -WORKDIR /usr/src/app - -COPY --from=install /temp/prod/node_modules node_modules -COPY . . -RUN npx prisma generate - -# RELEASE Stage - -FROM base AS release -COPY --from=prerelease /usr/src/app/node_modules ./node_modules -COPY --from=prerelease /usr/src/app/index.ts . -COPY --from=prerelease /usr/src/app/lib ./lib -COPY --from=prerelease /usr/src/app/routes ./routes -COPY --from=prerelease /usr/src/app/package.json . - -# run the app -USER bun -EXPOSE 3003/tcp -CMD ["bun", "run", "index.ts"] \ No newline at end of file diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 0000000..054edb4 --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,32 @@ +FROM oven/bun:1 AS base + +# Setup Node.js per compatibilitΓ  Prisma +RUN apt update && apt install -y curl +ARG NODE_VERSION=20 +RUN curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n \ + && bash n $NODE_VERSION \ + && rm n \ + && npm install -g n + +WORKDIR /usr/src/app + +# Copy package files first per Docker layer caching +COPY package.json bun.lockb ./ + +# Install dependencies +RUN bun install + +# Copy prisma schema +COPY prisma ./prisma + +# Generate Prisma client +RUN npx prisma generate + +# Copy source code +COPY . . 
+ +# Expose port +EXPOSE 3003 + +# Default command for development with hot reload +CMD ["bun", "--hot", "index.ts"] \ No newline at end of file diff --git a/bun.lockb b/bun.lockb index 408fb664cc4c2ca4367eab901e9348378966153d..9420aa44265d3eab644341382691562bfdb214a9 100755 GIT binary patch delta 3871 zcmeH~iB}Xy9>=>|P-bMfR+t$?17a`=GaNF4ih~AGVK7RFj`}=;Kv+-?1r>sVF(x93 z2bOLt9$YG-Q8W_-1lIcqtBD$ouCAMy=!$DlcQJTlzQ38C=kebD2itEx{jJ}xesy$J zb#=|P1GdW=Y*uUgE4^?0Yl(lJDYI);j&e$ul!?U`kH1&(WY40f2|p%GPjG$MljrC< zSZsVth-luIbHS#-f#VjR9Kdm;@i{3OS;+73rjR=yvFzpc|pxp;M7hn&;1P_Rvr$*>^OY(Ohv?sX$%;$GITm0U97x z1zIXjmKw;Mo063l&v7^4r;cn0;y5KV4%!161XV!8gIG~FA9qikrQdJ1pQtmnbo-}` zo%fGLhs(DvTwqc;g!bdRvRJ1O#oKt+k*LB3P1><@Lwwm{M>|=HGQU=5-2%-JbK_=g{hepfFG!1Gu_(1a`-RwzO^b7fy?m^?x+ z4A!z}NQ0IYK{$Wb5}}txV4x_{jk+dV25ST?bW{~0obqQyBlW@yL_!7zFnOe2wgLkf zi4cJm_YRSrhcyZo&tfA&1fM|G5~&vw1DSl3-nIrqsAGyzLv7oU3a8XNS228fh_}VF zI|J*r7k&)D;;NZ0np0P-KACqg$Bjl289V0|BBTa0 z`53)W5zL|?cY;|Fq`!`}K+<$f9;FwmbSxUus$)e_dYQ){R7pKy=UhUBgh3{Gv|w96 z{b`pvMcM^)NcO8r{>Ok;`x7buRi(M@1f&85Nh&~XV3W&M^1q_y_RSg8fSnXTN(JRm za&?!~UQz{=8giygNh#?f*`!pTE0kPrk}4%tNvh@%C9rGblnxZuOLF(-O+%v3+x&lQ zEMr5X{c>zfk0(tHE|@dU|JjH&d**cZUO3_EFxkD1o5u<~chz3MTpZUbZ=KcqLWT09 z!Zk_9qUsH(-Fn&hIjC33l4Bc2 zPn3UC=DfgX#*=03878sarm2_H^nQ)%^E$BfZX^44vytsssbHEd27WWk-C|^u zRw>vuSf#9gsgc>OR~_}%O=tjn;bZ8h*Uta_`F6|Pe- z#Wn-Kk4@fYWTAgiux3~$Cfjagt+3Lz8~9q*0BgOdV7;mgd>u=vGO{u273>VG2BzL& zWRGAi+F{@u*|)HEY{2tX8~A3HTaD){QJAh(huE`)O?GUpi=QcA=Te@HZ?qT615MK# z`|!NI>64}{eO(HhEfZJvSBuKS*!kN*mUp`Ih2^50{eTvc0~xriHofEYF49#8C`s?F zr3Q|xmwfcDyCak08zdiXsA{R~0m;`D>cOKlWj0FAZcyx;=Bo*JDq{y?%pQ)TXP}Dl zHRj%vdIn0p^k{IR!jBPYI^TpS?haX`Ma(nz%) zX8HH~uot&&+4A4U70@8iE=e2RQ-nPSw7Wh7zXRHtAAtXW|AKqqKDZ0oz!Jc~a$rAc9Z9mmH_%K0{(HC+U!~K4hWI=1Jva-_f%#woaA$|_D&6qO0)0VBY> zU^p<)T*V+U7SIYD2gZX4FcN$SW&k6Y2Bv^XU^4gw(5tZzN=H!-Ksp5WL@)t-0Ac~H zP+E{=Qy!`62yXN-U44R9WPh?~x@el}K^@o+Y5`5p9#8=`nvUNu?NU&JYzCV^ zIVc6WpbS)lufR@FDcMxjHn0`!0AyE5N`6`yF~3Jne$T#TKW`HFe8f>C4({f2ySnD{x6Z{CP7`=EYrY<6M0+>1Vm;B<`(`h( zukoDmGD58tha*C`qYu2%ByV!j%;UsSxi;HeWk z(T0$y6IE)<^Teq6n0}r&EMw<+)1vaQtfaL0NprQ4IoU~b)8l8yWzS7XPZpchyq|bW z&BuTE<}A-lN!O-k;gG)X%2l6B{JLK;9ZK}M;!>^Cz8^gDpTO=8yZCBDiN~hpJ#2j~ zf6&@?*}2R~yZKZFTY>yr@3)*xy((KEj&bJ$+>O|C?BQJ+syof+ICPGe`ggBq-5PiP z#(+fgQq*QAf1I9?n52nMOw35v#Al|Mt>oNH&CGczvlCypG#Q!MDH(VstD@g;L}Vvr xWt&TCC?ytq@x8rY5AoOdzwXdrjf+nHyqCB$miO#}^)5OO<-O|e#_}26{{V)1O<@24 delta 13211 zcmeI3cU%)q*Y}eEYA6bd0t!+rD3H*5R}rx{#EMcPpfn)}*a9}Npy-Gwq7>U@1;hr5 ziUJmFAfVW=E1-zj!Senl$zJBVp8J0Od;fd(<9E)?nVGX^%I=2UV^Wd)qhf_r{@{IE zRCOZ@_Q~}=Z!3KDrSz4;*Y7+>M{nvAD>vlv%89kh!K3!y(~w zLqmdsg|Sh7k&*E%Rz!G!fEoyo3=55bXEkOlmIm6FK~jU8%_aRmLv}^^wN$Qwr1npv z9Zr!D3L5yqesEhpKtSbC7^4lGs=NG>EDz)aLr z*@!Y7@jj`38roC4LP*-a(NVH=L;RtjYq;7;VwkQwJwaqtNOTxxV4}0ckzI)z7^i9x4m2dcrgH%iio_9l7OdTIK1Qx9#fo7(HGW<287s z!`!F$wx3uzl>Cfovbxu(w^S=ezU zf5`T2aej(5y4BY*_N*MgalgoFrN!bmLDi-wU#wD#seGp})qUW3n_SDfu~~b++n%X@ z<^Nao@g%Q9{P7EqE!9)@&D48kGDUay)rIppHBs^RRF-Fxb)taka}z@@aem!N-CxWO z*xBiDp31B*gM@)q{+D*mdLP=Cf2T-snTJuO-(B0AO)J{A8om3=Vf)-!L#jygyYwyd zzlL(6JEblS*ya$|mY!eNdx?HqW{ce!ws+GYMt{CzZt=o%S%+N9?=EnY)>$=-=-=JiL5}Z-9ianBin5@rSG%|U56N^&@FC&KO{ znH4!q?J$Wo*!KLpPxLQb;g z7^_MWO5!!c=p?(utS7e%lz1E@$cvZZv$Tn1%kWS^JBUxB5P^~vDS_GDH21eFcTu9# zgX!&V%jsiF94uWpLR+%P(uG@s8>D3F=w!2Dj82w39VTa_9dWR7;qHP`(gZ8OeTEV} zC5x^cr?)+6w{qd^w#QA{!S@ zm?J5M$Z;g?HZI%_+(bRlghNXE*mB)4!*r?H#NFDKv&D%N+q!V7ok+W_i-HdBme%B_ zt)qe;a;`Kt$P4qh80{4>d1`<;`a+LOF~Mne)+dkJh0{p3vZg{Al(=YNK*g2CaunxA zASc#wHz7xNH<#{Q&OKMsZtucTbt4WAE}Us@WD!KV8!3i(;6~aZdb$$_M;Fc%ce2RQ zg}WBx#85Eb*y@}r_ri8Zj)E5+7pxzEUeLb)-8{dgQd~e@R?>DHG@??THfAK5<*s2* 
z6`hcDf>f5)%1WwIK|*_Xlh#woAxAtl^yQ)LCDl?%TWLbl%c>96lb`MyB1o1tO4RcK zRQq2ftr{q;r;=t3rDZCqk36ZKFO{a!dMfGJ&81}-DL(%{DyVx_(uT5<>TRK?Rd&*P zSxJZM1U;2*(t0ZCiHAT^)ljMakCdEWTtQDTT-tz2+HnLVRe4F}NU0nJNl!G6R!Sw! zj+d6Hr1mC3Qq^Rs^p?sgQt2}Y8G52=QpI#gdQeHTGo*56q5hCN^8dHL#FO%W`|JP1 z{?ZyT(@Mme<7pS}=e#=JclOBH*7nHpuVDAUoUmOW!vw&FH+>Xw1v{iZLts zkx#mYC%X3=z;BqZ+*c{5pLz93vrE{@Y}T)Czp!h!i}$f4k@we{o_FZyv@YuVKRkY` zG3WZ*A6s{;ww1eVa69-ebcv;*{hW!TLOsuDZB@$s>042k9iyDNM@K*Z$m_md+3i&+ z-6jx~rJ5q44{O7g+r1Q(EX_{(D7YxK?ih7uld0E~9jaHx&fE9a^o+*Pof$5NzxZaP zr8}^O++AJtB;m&L_`r1SfSt4F@-B$4C5d-CWWDnsk=9o|-aqgDyW-M{lHM6AUt$fn zYi0U(eK9bo`#R3#etB^fSI#cedUBxu>+-H|ol_UGN4Oc878!3>8~nIYt#}qGfp?BS zofRHy)wa%`v?_YD+Po=><8I{axU^^ArwRQPoZWsV__$3tT4XZ4_RhjAP4iL9xZCzN z1?X}DmF1@__b?D>7|3|HQ`WoukZzY>23XW5yea?El3=H&wxy#IoJC&C82MaeZF_>NOB7OL6)q79c z3`#3yyvvgHE;@0-Qm^wdr+NjQaH*YlT#j{2{rst2k9PD>u`m5G@z#^Nz4HxDayM%w zt~pj;qcg)``Q5xY)xsu=Tla1ZOWAGlu9fJBG(`nD_AQGKRE*7gxX||Xn~$A4^{zg) zxzeMmbxT3u4?X_5w!$Ion`(K1G z_ficTTv?t6pBtC$Yg=2TG(r@1Rn#@N<3x`~TD!W`40@#Tw6|H&Hmx$XHs5FKK5q0a zx>UZpHMp!&H|^t&w$an$$qjgC8?*jcQ(E(ZNeym?X4aK$?AxgaH~7-x_M_(pax2^S z6*XK`KVDa)cf}{b>Gssvy5_gVK0dQoX6RTiiEp|WG$&8SyWO(htytje8m461#f^8q zUCCudY~sRQ%8#p^I!>Hz?iR}OX>#uQ+;j2TO05oqliMG0e}3C6n$f$4_0;#TqV`ph zcDEk)Axjf9MGAdu&zhxazd!gub=dT)=A*K&_L#nD=ebLlt&MX#b<_;_%j;Wa=Uyhs z?492Z2s7rLx5%jYs%JcW^r4~ZHyh)wUzhQ2udH_iwsXz5vot4+bDe86EA3Y3)zO`l z%&(8@!oNAdY}N1kDN3K+B0s&qr}cP4zDBoapO@d?Th5)KxMN+sPT7h_N=Jwu;U#K{ zGA9eK7f-HF$nex2)MZCy%yg zeS?03cbyx$#csA%|K@cYYqGv>9XHubeNlcx_i;{U-#0Ek!5Kmx!8^NmgLOkjHXhNR z8>us|WyC4-Tm24B-rDB4zC5UWnA)kXgD%X4XJai{b4*K z?)!)J-DJEwAnToB@uJ7-{v0Q3D|53;-e=Y=`_`RXO)%$;0=%1~;z$+Q+&2h+C8VviW1*4EH>lb8^Z(qnHI+m*!qnFB7C> zJE|?XKP$^DKQ>r6;0;!Dfai<6vgXgxfQg^~|jNFwIl+&7kv33(h)+%3ECvmiL$O?ue{+=_?QXunJwY z_UDCjD@$gb8~ehrId*jFxB6e#mims+A3ePG(}8iLW=_^B*xB;FP1SL|`H52Zr>6(I zeVXYhJ-OFME%(ipjoA+eJy7EbFvxrZP{xu+XHc;v2{ zaVgin>D>B~eC1wo&>c(1^oSaf;pQJ#GT!CMddJxprsA=|?2vVz-vcfU_TF%6 z>hf&6*!q?>4VP-=s*3RsYEp#XCY1KpEPvbhSE{zpkO^M%bQ?yGuUsdxUSZ`p`!bvC zTB#vAW!f;Naqu4JtbXzlJ5zhcJkQm7H1pl6vUReunwY7!ARLACA zQyl#$xK-P`r*Y1ml)yrt)n^N2+$)fEPtmkVwLZV^V$IzTN9vbzZ8tww-tX0@Gp)|m z`^T=xYk7uA-?ur99HBL6YpG!IwDnkG!nzF+^M#mU*;dj?BZl86rULnQ)YThs-o4({uQP~r-_4Cfv zqz{*uKQpu@Gh4>HLRs%rTfZNgk$z>S*B+zFOhfC;_@>8l8EFcMs29vJ_Rb*C@rs$wvz~-&jP2OKN&9i8q8h2>i&6*02je$OmqHEhKPPKD}Hn?uR zkTUJMdE2q}BrV=6*8_!_U;FQk&##Mej@;Hp|1M%4v_-PsU5xw^IBxIveiwf=b+7*X zxJUKpj?w(!YirYf{A866YPcAB<#^&W{h3-iIunlaPx8{O@3%GAb^5x&_gQI|ITus= zXc3iUP0+C}^zD($Ui}bLzh)m1WH1uG+HC=YUGyf_-PrP6o>h+=WTsipupI z9%PpONa^T)|7~f1^5*4-(tXMF?^njVGqT>rS$6eUz0rD?iQBrPb6PLQC^bA&UaoP! 
zOl@|Xt6J8D?`k;@<(KzU*P0;w@bhlLx68*bX~#b@O-XdW?|Sx>S;)`~QUdSNl1}&O zdwbk-MXmg|a<7)HdS7?q^`}lIat;&E`?}4K>Cpd__1x+6gW02oebt?r9)2gIWzGtr zdf3R@etB)*b|*Q@cvm9p-I&wuEvFg|T;9b`SMxpIaW-3})kN3%89U)-S(9Giw%pox z1I4F`nw z4>~>U=xTY#=%m%{-yhF@+V_gA+U#3!dS1oO_}^1%ZdiW|V}Dutdve$!Et4DL<}I+x zt&;KXysURByDaS0TaO%n{<~1i|44qv7Z2fv4MAUjj+ju@Q8Gv0TWjTuZo1Y(MEo69 zncpmeug#vJ9r$}iO39H5cas#oJrQqY3}28n?9(>Ku3*Th7mK{F9X~eyXvoWz!rP~E zpSJ8`-^O>S}+>b24^IS@7APzCc}!xlFh?ZT2mqi=m+Q* zhl%!hRQ`TGd4GLlYSqI`_i(-->hVTS-ijQPl2d}o=U2_~N?Kfg_*SwI6DXJzYL5{*3VK*c2~o)ku!vw*|b7gID?s{d;wu zyW#m--`fkRU6J(9t}DFhEa2Q+HYE1 zmG^qj*IQ-rrsd?0^}MRfnM0pMe)te_q9R)19e-X=8HdYd?UqbW6DVgr^8ZjjXjXOZ z(u(G$1d;Mur+kCIR+gRIdpvtpzndwy;#wayelAI#`L*hGhyOKE?tp{geJtYyTkBm` z-;yz0A!|5v?ZD&a@q^~g%H=A>)`sr;5KXGH_M5CuGu7-lMexpiqkPo&V^LkRG!7>z z%yGG!`uUvv)%s<7r(C%*Z_LoB)~$&$hA+z+UZm40@k9T8i+XjWvci?gS>3MmS{pI4u_fVM&fOUc6YEyznO!`e9(2Q_u6T@4#_koeV5txL zD!GdE8qr_o!>%OpNUxJBq&LW5kq`SONkn>!)FZu3%oBXrRb&m)YVs869b%v8!>%El zkk*n$q;rv|*`V5uzM^8jO!|j}7m7B8v1z3e^HmT^BHH9F z?yzm0-RAXyByy92vh-7)057K%3lrC$5qDByzLRysd)=;3_hFNSb@Jpwnmm!WG$9Gf zs$_)9pkv#I_CL0LbC;e?l7*c5Rw5OhqN4)%#Jj{o^y|0epcS}^6RGZrREK2|zgwo~dMego2r9_@1oU_Y zXdgNQ_>>_&o+G8YZU7$(#7@0{q|T@UER0z93X-;?FV*RbYijp3pt|lL08qPc0M+#X z4s;5s;YL7}>cA1ugDx7?(HX!?e(}+alOF;dme!d5-;2mfIi@;*A1S|z~-V(raumY?E ztH5fI1d_oTuokStS0vVYWKzHeuo0w!O<*%1U<*hC>0leVao!ZS9ekz)o?sa80Iq<( zQMU#*fX*JBtuR_H8rfEyqi>;rT{t^vB#bZO6k5^w>Of+HXw90Ec*Lb~pB z>gl@Db)@S?-!_pO5a`SB!N3U6D}r7D13`Dt1E>S~Oi}=Jo#@8D z32uSgpbC_Og>;5vk+A@lz#7nX?hAB*9?%DLKhSljA3$_KU!V*0fIgt_Ibb>%28M#EU>ZoId9*Kp#GOba{_;RS zJ^g^f3fKVpHNq6o4?4e5rgww@U4$gxT zkO%excd#2|fn6X2WP%-1xf3!Q(1%DaWDeK^=!QQE4g&!=0R95|!9g$>(2Y*pVB?F# zyZ;mlCjp&UdL^6y#{e~O925dNA7=p{oB_q42$+G>APvyIw0sU!fO2pVlz~g&0w@Jk zCy^o+s}hB4;3~KY=oJ+Y=p{_|VLiADZcEE_5~~5dVrl^`*GMJR(+y2^_oeb7#1TI(2>C&k)Ndxfj0yhXu<6B04<-VQ2@m41=_Sk}Rrh*| zm^MsQ8WS?aVOwCLFftYsm&ODQ{W+r`JRl-4Bs^$kO4{Tnt2Py>vwbn^bZAWM8WUK= zVK0OGd`sLgd6LK|Y3Pw?B+St{M5q5Shc=QLCe)3ILqZ>0oCxNa7&a!r2sO}CYfQ8o z6N)6RG2>&rA>qM-s1RYaT1Jxw@x9l15}gbj2%Hl~b1^|pV$ZNy@F>RRs01O6ds3F# z!daN$J0>!Uso7lBQvqH1wxsJ#Z4ncJ$Am|5*mO2v_4V?${QRPY&nF%RFjV0-NtIrq< zy3v?FVbk6U#;&A!l!BS}=DACXSK`auXXBd&)#!GGTG# z^bKuo=DPfEcKiQt4J3POj4g?5(9~w)FgFydyw2F2ttR$aypaNJ$({xaj-ws92a#$= zl%HsFrrDDX5dQYW?TMxh6I^-PSh!p{Pj5DdjoZEWV8S(xJvVI89%b`^4w!F_`>v(f zf(3``KzyHSa?~BjL)hErK)yV&;50fAqo;q@H9C?wlvX>D!%s(RGjW*i?-Z zdH8Iy_69e}ooDgepc~2En+>oJ=nle@pxTX$elGSo;rZzPZ|Lto9Le-IAP&hNx*_4w zLPW~)CHGJA@wo}ndohPRd12PO++Fg=`bs+HC$zfMa#md`9Sdilc0Q~X@z3xgJg zM+6FZeu04z;XJ>{5VtVDNdJg9Q?WKOO27+>kCe^hZ~1@J5%<I&)S;QjJSkef2w#kJ9TF3^_6i7 z{bWi!dbdc=(PZnTr>e3Q28xHk3{zY{3+xy=#@Jv#VQdgDCMuLpkj0-O+W#qtJ(si> z_=g0B@&Y2l5$GKq5)qzWs>IeF9Q$u|H0}RClsrqK`>UTSee|N93LbsRh<)wJ1d*eL z$)bHuOF#1Do2r}*-;(J6(pQtbz(E^H1}!j2WX?O~{k#)u=B4bMElo39T^K~AycBegXFx{C%)f$$yws}-iHtRWYGr$ zb@9a@URz8}fS-_u?U}}A>u4|q*!fe#Go$m+gL(d=j>)FEvIlgNJUyY6WYSfYp`sL%)mj)w>N;o^uD@Pzn9A>hShJ4b|LG6ZxDg;5cq zyl7z*T8Y=u@;}$n#r!WdJaf829{)ZEI{aHD-j(7#6etK41o;UC(O9KGKYzFa7X*G` zbO(C=O9w;$((rI4V5PCn631r;3h)z85EUZ~3FQR}a4W#{(}nJ%`%e{2{Zk>HSKWVF zL8E^v(i;@n1GN5j8TEhXna!XpJKNNN%;eKi)1~qf|L#GhSYizLNlwQg{wu&zZ^;rp z$#0$55PFlt|A7b(_KW7lgon_v1w@2KMDe0yA|oTBgmhQw$&_L9ANBCcTyjN`)h)dh z=$UNO7bvp(X-KW1gN$vS1>K4^f6j~QKLxQd>%SefppJ?YvCCHU?$urVQwO~yzj>Ge zcccG<0$M8Qz9gk@&DrLpwY{GX{^CIeQXXAM@kKK?nkW6A6XJc?=TDjLJ(&{RrFVqy zt?C#Vy(cEJ*}6TYkAF0k=6F_;Y45@Oovvzx8`ae(yjp$uI*I-(%D3RoQ*m*3`FCnriF_rT+&-$`msI diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..cc1f470 --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,126 @@ +services: + # Database PostgreSQL + 
postgres: + image: postgres:15-alpine + container_name: dataviz-postgres + restart: unless-stopped + environment: + POSTGRES_DB: dataviz + POSTGRES_USER: dataviz + POSTGRES_PASSWORD: dataviz123 + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U dataviz -d dataviz"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - dataviz-network + + # Redis per cache (opzionale) + redis: + image: redis:7-alpine + container_name: dataviz-redis + restart: unless-stopped + ports: + - "6379:6379" + command: redis-server --appendonly yes + volumes: + - redis_data:/data + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 3s + retries: 5 + networks: + - dataviz-network + + # Migrazione database + dataviz-migrate: + build: + context: . + dockerfile: Dockerfile.dev + container_name: dataviz-migrate + environment: + DATABASE_URL: postgresql://dataviz:dataviz123@postgres:5432/dataviz + depends_on: + postgres: + condition: service_healthy + networks: + - dataviz-network + volumes: + # Mount solo per avere accesso al schema Prisma + - ./prisma:/usr/src/app/prisma:ro + working_dir: /usr/src/app + command: ["npx", "prisma", "migrate", "deploy"] + restart: "no" + + # Applicazione DataViz + dataviz-app: + build: + context: . + dockerfile: Dockerfile.dev + container_name: dataviz-app + restart: unless-stopped + environment: + NODE_ENV: development + PORT: 3003 + DATABASE_URL: postgresql://dataviz:dataviz123@postgres:5432/dataviz + JWT_SECRET: dev-jwt-secret-very-long-and-secure + RESEND_API_KEY: ${RESEND_API_KEY:-} + OPENAI_API_KEY: ${OPENAI_API_KEY:-} + DOMAINS: http://localhost:3000,http://localhost:3001 + ports: + - "3003:3003" + volumes: + # Hot reload per sviluppo - mount del codice sorgente + - .:/usr/src/app:cached + - /usr/src/app/node_modules + depends_on: + postgres: + condition: service_healthy + dataviz-migrate: + condition: service_completed_successfully + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:3003/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + networks: + - dataviz-network + + # PgAdmin per gestione database (opzionale) + pgadmin: + image: dpage/pgadmin4:latest + container_name: dataviz-pgadmin + restart: unless-stopped + environment: + PGADMIN_DEFAULT_EMAIL: admin@dataviz.local + PGADMIN_DEFAULT_PASSWORD: admin123 + PGADMIN_CONFIG_SERVER_MODE: 'False' + ports: + - "5050:80" + volumes: + - pgadmin_data:/var/lib/pgadmin + depends_on: + - postgres + networks: + - dataviz-network + profiles: + - tools + +volumes: + postgres_data: + driver: local + redis_data: + driver: local + pgadmin_data: + driver: local + +networks: + dataviz-network: + driver: bridge \ No newline at end of file diff --git a/index.ts b/index.ts index 99b7484..90f719d 100644 --- a/index.ts +++ b/index.ts @@ -7,6 +7,8 @@ import authRouter from "./routes/auth.ts"; import chartRouter from "./routes/charts.ts"; import dashRouter from "./routes/dashboards.ts"; import suggestionsRouter from "./routes/hints.ts"; +import healthRouter from "./routes/health.ts"; + // import seedUsers from "./seeds/seed-users.ts"; // import * as db from "./lib/db.ts"; @@ -14,47 +16,110 @@ const whitelist = process?.env.DOMAINS?.split(",") || "http://localhost:3000"; const app = express(); -// app.use(helmet()); -// app.use( -// cors({ -// origin: whitelist, -// // credentials: true, -// // origin: whitelist, -// // methods: ["OPTIONS", "GET", "POST", "PUT", "PATCH", "DELETE"], -// }) -// ); 
-app.use( - cors({ - origin: ["localhost", "http://localhost:3000"], - credentials: true, - }) -); +// Configurazione CORS piΓΉ flessibile per sviluppo locale +const corsOptions = { + origin: process.env.NODE_ENV === 'production' + ? whitelist + : ["http://localhost:3000", "http://localhost:3001", "http://localhost:5173"], + credentials: true, + methods: ["OPTIONS", "GET", "POST", "PUT", "PATCH", "DELETE"], + allowedHeaders: ["Content-Type", "Authorization", "Cookie"] +}; + +// Middleware di sicurezza +if (process.env.NODE_ENV === 'production') { + app.use(helmet()); +} + +app.use(cors(corsOptions)); app.use(cookieParser()); -app.use(express.json()); +app.use(express.json({ limit: '10mb' })); // Aumentato limite per dashboard con molti dati + +// Health checks PRIMA di altri middleware per evitare autenticazione +app.use("/", healthRouter as Router); + +// Middleware di autenticazione app.use(middlewares.checkAuthCookie); -// app.use(middlewares.deserializeUser); -const port = 3003; +const port = parseInt(process.env.PORT || "3003"); + +// Root endpoint app.get("/", (req, res) => { - res.json("Hello World!"); + res.json({ + service: "dataviz-srv", + version: process.env.npm_package_version || "1.0.0", + environment: process.env.NODE_ENV || "development", + timestamp: new Date().toISOString(), + endpoints: { + health: "/api/health", + ready: "/api/ready", + startup: "/api/startup", + metrics: "/api/metrics", + auth: "/auth", + charts: "/charts", + dashboards: "/dashboards", + hints: "/hints" + } + }); }); -/* SEED +/* SEED - Decommentare se serve app.get("/seed", (req, res) => { const result = seedUsers(); res.json({ result }); }); */ +// API Routes app.use("/auth", authRouter as Router); app.use("/charts", chartRouter as Router); app.use("/dashboards", dashRouter as Router); app.use("/hints", suggestionsRouter as Router); +// Error handling middleware app.use(middlewares.notFound); app.use(middlewares.errorHandler); -app.listen(port, () => { - console.log(`Listening on port ${port}...`); + +// Graceful shutdown handling +process.on('SIGTERM', async () => { + console.log('SIGTERM received, shutting down gracefully...'); + + // Chiudi connessioni database + try { + const { prisma } = await import("./lib/db.ts"); + await prisma.$disconnect(); + console.log('Database connections closed'); + } catch (error) { + console.error('Error closing database connections:', error); + } + + process.exit(0); +}); + +process.on('SIGINT', async () => { + console.log('SIGINT received, shutting down gracefully...'); + + try { + const { prisma } = await import("./lib/db.ts"); + await prisma.$disconnect(); + console.log('Database connections closed'); + } catch (error) { + console.error('Error closing database connections:', error); + } + + process.exit(0); +}); + +// Start server +app.listen(port, '0.0.0.0', () => { + console.log(`πŸš€ DataViz Server listening on port ${port}`); + console.log(`πŸ“Š Environment: ${process.env.NODE_ENV || 'development'}`); + console.log(`πŸ”— Health check: http://localhost:${port}/health`); + console.log(`⚑ Ready check: http://localhost:${port}/ready`); + + if (process.env.NODE_ENV !== 'production') { + console.log(`πŸ“– API docs: http://localhost:${port}/`); + } }); -export default app; +export default app; \ No newline at end of file diff --git a/lib/email.ts b/lib/email.ts index cd4ebd5..1db36c4 100644 --- a/lib/email.ts +++ b/lib/email.ts @@ -1,31 +1,50 @@ import { Resend } from "resend"; -const resend = new Resend(process.env.RESEND_API_KEY || ""); -const SENDER_EMAIL = 
process.env.SENDER_EMAIL || ""; -const HOST = process.env.HOST || "/"; +// Usa una key dummy se non configurata per evitare errori al startup +const RESEND_API_KEY = process.env.RESEND_API_KEY || "re_dummy_key_for_development"; +const resend = new Resend(RESEND_API_KEY); + +const SENDER_EMAIL = process.env.SENDER_EMAIL || "noreply@localhost"; +const HOST = process.env.HOST || "http://localhost:3003"; const COPY = "Dataviz"; -async function sendMail(addresses: string[], html: string) { - await resend.emails.send({ - from: SENDER_EMAIL, - to: addresses.join(";"), - subject: "Activate Account", - html, - }); +async function sendMail(addresses: string[], html: string, subject: string = "Dataviz Notification") { + // Skip email in development se la key Γ¨ dummy + if (RESEND_API_KEY === "re_dummy_key_for_development") { + console.log("πŸ“§ Email sending skipped (development mode)"); + console.log(`To: ${addresses.join(", ")}`); + console.log(`Subject: ${subject}`); + console.log(`HTML preview: ${html.substring(0, 200)}...`); + return { success: true, messageId: "dev-mode-skip" }; + } + + try { + const result = await resend.emails.send({ + from: SENDER_EMAIL, + to: addresses, + subject, + html, + }); + console.log("πŸ“§ Email sent successfully:", result); + return result; + } catch (error) { + console.error("πŸ“§ Email sending failed:", error); + throw error; + } } function compileTemplate(template: string, data: any) { return template.replace(/\${(.*?)}/g, (_, key) => data[key]); } -export async function sendActivationEmail(recipeint: string, pin: string[]) { +export async function sendActivationEmail(recipient: string, pin: string[]) { const html = activationTemplate(HOST, pin); - await sendMail([recipeint], html); + return await sendMail([recipient], html, "Activate Your Account"); } -export async function sendResetPasswordEmail(recipeint: string, pin: string[]) { +export async function sendResetPasswordEmail(recipient: string, pin: string[]) { const html = resetTemplate(HOST, pin); - await sendMail([recipeint], html); + return await sendMail([recipient], html, "Reset Your Password"); } function resetTemplate(baseUrl: string, pin: string[]) { @@ -35,9 +54,49 @@ function resetTemplate(baseUrl: string, pin: string[]) { return `${item}`; }) .join(""); + return ` -
-  Reset Password
-  Reset Password
-  Click the following link to reset your password
-  ${url}
-  Or, copy and paste this temporary code:
-  ${code}
-  If you didn't try to reset your password, you can safely ignore this email.
-  ${COPY}
-  `;
+  [styled HTML email layout]
+  Reset Password
+  Reset Password
+  Click the following link to reset your password
+  ${url}
+  Or, copy and paste this temporary code:
+  ${code}
+  If you didn't try to reset your password, you can safely ignore this email.
+  ${COPY}
+ +`; } function activationTemplate(baseUrl: string, pin: string[]) { @@ -49,32 +108,56 @@ function activationTemplate(baseUrl: string, pin: string[]) { .join(""); return ` -
-  Confirm Your Email
-  Activate
-  Click the following link to confirm your email
-  ${url}
-  Or, copy and paste this temporary code:
-  ${code}
-  If you didn't try to signup, you can safely ignore this email.
-  ${COPY}
-  `;
+  [styled HTML email layout]
+  Confirm Your Email
+  Activate Your Account
+  Click the following link to confirm your email
+  ${url}
+  Or, copy and paste this temporary code:
+  ${code}
+  If you didn't try to signup, you can safely ignore this email.
+  ${COPY}
+ +`; } -// function compileTemplate(template: string, data: any) { -// return template.replace(/\$(.*?)\$/g, (_, key) => data[key.toLowerCase()]); -// } - -// export async function sendActivationEmail(email: string, token: string) { -// const frontendUrl = process.env.FE_HOST; -// const template = await fs.readFile(`data/email-templates/activate.html`); -// const html = compileTemplate(template.toString(), { -// token, -// frontendUrl, -// }); -// await senMail(email, html); -// } - -// export async function sendResetPasswordEmail(email: string, token: string) { -// const frontendUrl = process.env.FE_HOST; -// const template = await fs.readFile(`data/email-templates/reset.html`); - -// const html = compileTemplate(template.toString(), { -// token, -// frontendUrl, -// }); - -// await senMail(email, html); -// } +// Utility per test email in development +export async function testEmailSystem() { + if (RESEND_API_KEY === "re_dummy_key_for_development") { + console.log("πŸ“§ Email system: Development mode (emails will be logged, not sent)"); + return { mode: "development", configured: false }; + } else { + console.log("πŸ“§ Email system: Production mode with Resend API"); + return { mode: "production", configured: true }; + } +} \ No newline at end of file diff --git a/package.json b/package.json index 9254e72..a2b85a1 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "@prisma/client": "^5.17.0", "@types/express": "^4.17.21", "axios": "^1.7.7", - "bcrypt": "^5.1.1", + "bcrypt": "^6.0.0", "cookie-parser": "^1.4.6", "cors": "^2.8.5", "cuid": "^3.0.0", diff --git a/routes/health.ts b/routes/health.ts new file mode 100644 index 0000000..7109021 --- /dev/null +++ b/routes/health.ts @@ -0,0 +1,227 @@ +// routes/health.ts +import { Router, Request, Response } from "express"; +import { prisma } from "../lib/db.js"; + +const router = Router(); + +// Basic health check - per liveness probe +router.get("/api/health", async (req: Request, res: Response) => { + try { + res.status(200).json({ + status: "healthy", + timestamp: new Date().toISOString(), + uptime: Math.floor(process.uptime()), + version: process.env.npm_package_version || "1.0.0", + service: "dataviz-srv" + }); + } catch (error) { + res.status(503).json({ + status: "unhealthy", + timestamp: new Date().toISOString(), + error: "Service unavailable" + }); + } +}); + +// Readiness check - verifica dipendenze esterne +router.get("/api/ready", async (req: Request, res: Response) => { + const checks = { + database: false, + timestamp: new Date().toISOString(), + service: "dataviz-srv" + }; + + let allHealthy = true; + + try { + // Test database connection con timeout + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Database timeout')), 5000); + }); + + const dbPromise = prisma.$queryRaw`SELECT 1 as health_check`; + + await Promise.race([dbPromise, timeoutPromise]); + checks.database = true; + } catch (error) { + console.error('Database health check failed:', error); + checks.database = false; + allHealthy = false; + } + + const status = allHealthy ? 200 : 503; + + res.status(status).json({ + status: allHealthy ? 
"ready" : "not ready", + checks, + ...(!allHealthy && { + message: "One or more dependencies are not ready" + }) + }); +}); + +// Detailed health check con piΓΉ informazioni +router.get("/health/detailed", async (req: Request, res: Response) => { + const health = { + status: "healthy", + timestamp: new Date().toISOString(), + uptime: Math.floor(process.uptime()), + version: process.env.npm_package_version || "1.0.0", + environment: process.env.NODE_ENV || "development", + service: "dataviz-srv", + memory: { + used: Math.round(process.memoryUsage().heapUsed / 1024 / 1024), + total: Math.round(process.memoryUsage().heapTotal / 1024 / 1024), + external: Math.round(process.memoryUsage().external / 1024 / 1024), + rss: Math.round(process.memoryUsage().rss / 1024 / 1024) + }, + dependencies: { + database: false, + external_services: { + resend: process.env.RESEND_API_KEY ? "configured" : "not configured", + openai: process.env.OPENAI_API_KEY ? "configured" : "not configured" + } + }, + database_info: {} + }; + + let allHealthy = true; + + try { + // Test database connection + const dbResult = await prisma.$queryRaw` + SELECT + current_database() as database_name, + version() as database_version, + current_user as database_user, + inet_server_addr() as server_addr + ` as any[]; + + health.dependencies.database = true; + if (dbResult && dbResult[0]) { + health.database_info = { + name: dbResult[0].database_name, + version: dbResult[0].database_version?.split(' ')[0] || 'unknown', + user: dbResult[0].database_user, + server: dbResult[0].server_addr || 'localhost' + }; + } + + // Test database tables + const tableCount = await prisma.$queryRaw` + SELECT count(*) as table_count + FROM information_schema.tables + WHERE table_schema = 'public' + ` as any[]; + + health.database_info.tables = parseInt(tableCount[0]?.table_count || '0'); + + } catch (error) { + console.error('Database detailed check failed:', error); + health.dependencies.database = false; + health.database_info = { error: 'Connection failed' }; + allHealthy = false; + } + + if (!allHealthy) { + health.status = "degraded"; + } + + const status = allHealthy ? 
200 : 503; + res.status(status).json(health); +}); + +// Startup probe - verifica che l'app sia completamente inizializzata +router.get("/api/startup", async (req: Request, res: Response) => { + try { + // Verifica che Prisma sia inizializzato + await prisma.$connect(); + + // Verifica che le tabelle principali esistano + const tables = await prisma.$queryRaw` + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name IN ('User', 'Chart', 'Dashboard') + ` as any[]; + + if (tables.length < 3) { + throw new Error('Required tables not found'); + } + + res.status(200).json({ + status: "started", + timestamp: new Date().toISOString(), + message: "Application fully initialized", + tables_found: tables.length + }); + } catch (error) { + res.status(503).json({ + status: "starting", + timestamp: new Date().toISOString(), + error: "Application still initializing" + }); + } +}); + +// Metrics endpoint per Prometheus +router.get("/api/metrics", async (req: Request, res: Response) => { + try { + let dbStatus = 0; + let userCount = 0; + let chartCount = 0; + let dashboardCount = 0; + + try { + await prisma.$queryRaw`SELECT 1`; + dbStatus = 1; + + // Conta records principali + userCount = await prisma.user.count(); + chartCount = await prisma.chart.count(); + dashboardCount = await prisma.dashboard.count(); + } catch (error) { + console.error('Metrics DB query failed:', error); + } + + const metrics = ` +# HELP dataviz_uptime_seconds Application uptime in seconds +# TYPE dataviz_uptime_seconds counter +dataviz_uptime_seconds ${Math.floor(process.uptime())} + +# HELP dataviz_memory_usage_bytes Memory usage in bytes +# TYPE dataviz_memory_usage_bytes gauge +dataviz_memory_usage_bytes{type="heap_used"} ${process.memoryUsage().heapUsed} +dataviz_memory_usage_bytes{type="heap_total"} ${process.memoryUsage().heapTotal} +dataviz_memory_usage_bytes{type="external"} ${process.memoryUsage().external} +dataviz_memory_usage_bytes{type="rss"} ${process.memoryUsage().rss} + +# HELP dataviz_database_status Database connection status (1=connected, 0=disconnected) +# TYPE dataviz_database_status gauge +dataviz_database_status ${dbStatus} + +# HELP dataviz_users_total Total number of users +# TYPE dataviz_users_total gauge +dataviz_users_total ${userCount} + +# HELP dataviz_charts_total Total number of charts +# TYPE dataviz_charts_total gauge +dataviz_charts_total ${chartCount} + +# HELP dataviz_dashboards_total Total number of dashboards +# TYPE dataviz_dashboards_total gauge +dataviz_dashboards_total ${dashboardCount} + +# HELP dataviz_build_info Build information +# TYPE dataviz_build_info gauge +dataviz_build_info{version="${process.env.npm_package_version || '1.0.0'}",environment="${process.env.NODE_ENV || 'development'}",service="dataviz-srv"} 1 +`; + + res.set('Content-Type', 'text/plain'); + res.send(metrics.trim()); + } catch (error) { + res.status(500).set('Content-Type', 'text/plain').send('# Error generating metrics'); + } +}); + +export default router; \ No newline at end of file diff --git a/types.ts b/types.ts index b08fd12..2d3b49a 100644 --- a/types.ts +++ b/types.ts @@ -1,35 +1,23 @@ -import * as z from "zod"; - -export const paramsWithIdSchema = z.object({ - id: z.string().min(1, "Invalid id"), -}); - -export type ParamsWithId = z.infer; +export interface ParsedToken { + userId: string; + email?: string; + iat?: number; + exp?: number; +} export interface ErrorResponse { error: { - stack?: string; message: string; + stack?: string; }; } -export interface 
ServerResponse { - data?: any; - error?: ErrorResponse; -} - export interface RequestValidators { - params?: z.AnyZodObject; - body?: z.AnyZodObject; - query?: z.AnyZodObject; -} - -export interface TokensResponseInterface { - access_token: string; - refresh_token?: string; + params?: any; + body?: any; + query?: any; } -export interface ParsedToken { - exp: number; - userId: string; +export interface AuthenticatedRequest extends Request { + user?: ParsedToken; } From 16456f4833acae5e48bde65c200b9cfef9f1c7d6 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Sun, 15 Jun 2025 14:31:35 +0200 Subject: [PATCH 02/13] feat: implement structured JSON logging - Add centralized logger with JSON output - Fix duplicate 404 logging issue - Clean error responses without stack traces - Add request logging middleware - Support LOG_LEVEL environment variable --- docker-compose.yaml | 2 +- index.ts | 20 +++++--- lib/logger.ts | 119 ++++++++++++++++++++++++++++++++++++++++++++ lib/middlewares.ts | 54 ++++++++++++++++---- routes/health.ts | 14 ++++-- 5 files changed, 184 insertions(+), 25 deletions(-) create mode 100644 lib/logger.ts diff --git a/docker-compose.yaml b/docker-compose.yaml index cc1f470..453a0c6 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -85,7 +85,7 @@ services: dataviz-migrate: condition: service_completed_successfully healthcheck: - test: ["CMD-SHELL", "curl -f http://localhost:3003/health || exit 1"] + test: ["CMD-SHELL", "curl -f http://localhost:3003/api/health || exit 1"] interval: 30s timeout: 10s retries: 3 diff --git a/index.ts b/index.ts index 90f719d..b336b1a 100644 --- a/index.ts +++ b/index.ts @@ -8,6 +8,7 @@ import chartRouter from "./routes/charts.ts"; import dashRouter from "./routes/dashboards.ts"; import suggestionsRouter from "./routes/hints.ts"; import healthRouter from "./routes/health.ts"; +import { logger } from "./lib/logger.ts"; // import seedUsers from "./seeds/seed-users.ts"; // import * as db from "./lib/db.ts"; @@ -38,6 +39,8 @@ app.use(express.json({ limit: '10mb' })); // Aumentato limite per dashboard con // Health checks PRIMA di altri middleware per evitare autenticazione app.use("/", healthRouter as Router); +app.use(middlewares.requestLogger); + // Middleware di autenticazione app.use(middlewares.checkAuthCookie); @@ -112,14 +115,15 @@ process.on('SIGINT', async () => { // Start server app.listen(port, '0.0.0.0', () => { - console.log(`πŸš€ DataViz Server listening on port ${port}`); - console.log(`πŸ“Š Environment: ${process.env.NODE_ENV || 'development'}`); - console.log(`πŸ”— Health check: http://localhost:${port}/health`); - console.log(`⚑ Ready check: http://localhost:${port}/ready`); - - if (process.env.NODE_ENV !== 'production') { - console.log(`πŸ“– API docs: http://localhost:${port}/`); - } + logger.info('Server started', { + port, + environment: process.env.NODE_ENV || 'development', + endpoints: { + health: '/api/health', + ready: '/api/ready', + metrics: '/api/metrics' + } + }); }); export default app; \ No newline at end of file diff --git a/lib/logger.ts b/lib/logger.ts new file mode 100644 index 0000000..f60238a --- /dev/null +++ b/lib/logger.ts @@ -0,0 +1,119 @@ +interface LogEntry { + service: string; + version: string; + environment: string; + timestamp: string; + level: 'debug' | 'info' | 'warn' | 'error'; + message: string; + [key: string]: any; +} + +class Logger { + private service: string; + private version: string; + private environment: string; + private logLevel: number; + + constructor() { + this.service = 
'dataviz-srv'; + this.version = process.env.npm_package_version || '1.0.0'; + this.environment = process.env.NODE_ENV || 'development'; + + const levels = { debug: 0, info: 1, warn: 2, error: 3 }; + this.logLevel = levels[process.env.LOG_LEVEL as keyof typeof levels] ?? + (this.environment === 'production' ? 1 : 0); + } + + private shouldLog(level: LogEntry['level']): boolean { + const levels = { debug: 0, info: 1, warn: 2, error: 3 }; + return levels[level] >= this.logLevel; + } + + private log(level: LogEntry['level'], message: string, meta: Record = {}) { + if (!this.shouldLog(level)) return; + + const entry: LogEntry = { + service: this.service, + version: this.version, + environment: this.environment, + timestamp: new Date().toISOString(), + level, + message, + ...meta + }; + + const spacing = this.environment === 'development' ? 2 : undefined; + console.log(JSON.stringify(entry, null, spacing)); + } + + debug(message: string, meta?: Record) { + this.log('debug', message, meta); + } + + info(message: string, meta?: Record) { + this.log('info', message, meta); + } + + warn(message: string, meta?: Record) { + this.log('warn', message, meta); + } + + error(message: string, error?: Error | Record) { + const meta = error instanceof Error + ? { error_message: error.message, stack: error.stack } + : error || {}; + this.log('error', message, meta); + } + + request(req: any, res: any, duration?: number) { + this.info('HTTP request', { + method: req.method, + url: req.url, + status: res.statusCode, + duration_ms: duration, + user_id: req.user?.userId, + ip: req.ip, + user_agent: req.get('User-Agent') + }); + } + + httpError(message: string, statusCode: number, req?: any, meta: Record = {}) { + const level = statusCode >= 500 ? 'error' : statusCode === 404 ? 'info' : 'warn'; + + this.log(level, message, { + status_code: statusCode, + method: req?.method, + url: req?.originalUrl, + user_id: req?.user?.userId, + ...meta + }); + } + + health(endpoint: string, status: 'healthy' | 'unhealthy', checks?: Record) { + const level = status === 'healthy' ? 
'info' : 'warn'; + this.log(level, `Health check ${status}`, { + endpoint, + status, + ...checks + }); + } + + database(operation: string, model: string, duration?: number, error?: Error) { + if (error) { + this.error(`Database ${operation} failed`, { + operation, + model, + duration_ms: duration, + error_message: error.message + }); + } else { + this.debug(`Database ${operation}`, { + operation, + model, + duration_ms: duration + }); + } + } +} + +export const logger = new Logger(); \ No newline at end of file diff --git a/lib/middlewares.ts b/lib/middlewares.ts index e188eaa..c8b29b9 100644 --- a/lib/middlewares.ts +++ b/lib/middlewares.ts @@ -2,10 +2,26 @@ import { verifyAccessToken } from "./jwt"; import { ZodError } from "zod"; import type { ErrorResponse, RequestValidators } from "../types"; import type { NextFunction, Response, Request } from "express"; +import { logger } from "./logger.js"; + +export function requestLogger(req: Request, res: Response, next: NextFunction) { + const start = Date.now(); + + res.on('finish', () => { + // Skip logging per errori che giΓ  vengono loggati + if (res.statusCode >= 400) return; + + const duration = Date.now() - start; + logger.request(req, res, duration); + }); + + next(); +} export function notFound(req: Request, res: Response, next: NextFunction) { res.status(404); - const error = new Error(`πŸ” - Not Found - ${req.originalUrl}`); + const error = new Error(`Not Found - ${req.originalUrl}`); + (error as any).isNotFound = true; next(error); } @@ -13,16 +29,28 @@ export function errorHandler( err: Error, req: Request, res: Response, - // eslint-disable-next-line @typescript-eslint/no-unused-vars next: NextFunction ) { const statusCode = res.statusCode !== 200 ? res.statusCode : 500; res.status(statusCode); + + if ((err as any).isNotFound) { + logger.info('Route not found', { + method: req.method, + url: req.originalUrl + }); + } else if (statusCode >= 500) { + logger.error(err.message, { + method: req.method, + url: req.originalUrl, + stack: err.stack + }); + } + res.json({ error: { - message: err.message, - stack: process.env.NODE_ENV === "production" ? "πŸ₯ž" : err.stack, - }, + message: err.message + } }); } @@ -42,6 +70,11 @@ export function validateRequest(validators: RequestValidators) { } catch (error) { if (error instanceof ZodError) { res.status(400); + logger.warn('Validation error', { + errors: error.errors, + url: req.originalUrl, + method: req.method + }); } next(error); } @@ -49,18 +82,16 @@ export function validateRequest(validators: RequestValidators) { } export function checkAuthCookie(req: any, res: Response, next: NextFunction) { - console.log("checkAuthCookie"); - console.log("Cookies: ", req.cookies); try { const accessToken = req.cookies["access_token"] || null; - console.log("ACCESS TOKEN", accessToken); if (!accessToken) { - return; + return next(); } const payload = verifyAccessToken(accessToken) as any; req.user = payload; + logger.debug('User authenticated', { userId: payload.userId }); } catch (error) { - console.log("ERROR", error); + logger.warn('Authentication failed', { error: error instanceof Error ? 
error.message : error }); req.user = null; } finally { next(); @@ -90,6 +121,7 @@ export function requireUser(req: any, res: Response, next: NextFunction) { const user = req.user; if (!user) { res.status(401); + logger.warn('Unauthorized access attempt', { url: req.originalUrl }); throw new Error("Unauthorized."); } @@ -97,4 +129,4 @@ export function requireUser(req: any, res: Response, next: NextFunction) { } catch (error) { next(error); } -} +} \ No newline at end of file diff --git a/routes/health.ts b/routes/health.ts index 7109021..f8ecc9a 100644 --- a/routes/health.ts +++ b/routes/health.ts @@ -1,4 +1,4 @@ -// routes/health.ts +import { logger } from "../lib/logger.js"; import { Router, Request, Response } from "express"; import { prisma } from "../lib/db.js"; @@ -7,14 +7,18 @@ const router = Router(); // Basic health check - per liveness probe router.get("/api/health", async (req: Request, res: Response) => { try { - res.status(200).json({ + const healthData = { status: "healthy", timestamp: new Date().toISOString(), uptime: Math.floor(process.uptime()), version: process.env.npm_package_version || "1.0.0", service: "dataviz-srv" - }); + }; + + logger.health('/api/health', 'healthy'); + res.status(200).json(healthData); } catch (error) { + logger.health('/api/health', 'unhealthy', { error }); res.status(503).json({ status: "unhealthy", timestamp: new Date().toISOString(), @@ -34,7 +38,6 @@ router.get("/api/ready", async (req: Request, res: Response) => { let allHealthy = true; try { - // Test database connection con timeout const timeoutPromise = new Promise((_, reject) => { setTimeout(() => reject(new Error('Database timeout')), 5000); }); @@ -44,12 +47,13 @@ router.get("/api/ready", async (req: Request, res: Response) => { await Promise.race([dbPromise, timeoutPromise]); checks.database = true; } catch (error) { - console.error('Database health check failed:', error); + logger.error('Database health check failed', error instanceof Error ? error : { error }); checks.database = false; allHealthy = false; } const status = allHealthy ? 200 : 503; + logger.health('/api/ready', allHealthy ? 'healthy' : 'unhealthy', checks); res.status(status).json({ status: allHealthy ? 
"ready" : "not ready", From be94c6f132a0b3f6d22c2dbf820df8e9f0bdd386 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Sun, 15 Jun 2025 15:47:52 +0200 Subject: [PATCH 03/13] feat: add complete Helm chart for DataViz service - Add production-ready Helm chart with configurable options - Support internal PostgreSQL or external Azure Database - Include health checks, security contexts, and network policies - Add HPA with custom scaling behavior - Include pre-install migration job and post-install tests - Support multiple environments (dev/staging/prod) - Add ServiceMonitor for Prometheus integration --- .gitignore | 4 +- charts/dataviz-srv/.helmignore | 25 ++ charts/dataviz-srv/Chart.yaml | 18 + charts/dataviz-srv/README.md | 363 ++++++++++++++++++ charts/dataviz-srv/templates/_helpers.tpl | 71 ++++ charts/dataviz-srv/templates/configmap.yaml | 10 + charts/dataviz-srv/templates/deployment.yaml | 108 ++++++ .../templates/external-db-secret.yaml | 11 + charts/dataviz-srv/templates/hpa.yaml | 36 ++ charts/dataviz-srv/templates/ingress.yaml | 41 ++ .../dataviz-srv/templates/job-migration.yaml | 75 ++++ .../dataviz-srv/templates/networkpolicy.yaml | 36 ++ charts/dataviz-srv/templates/pdb.yaml | 18 + charts/dataviz-srv/templates/secret.yaml | 13 + charts/dataviz-srv/templates/service.yaml | 19 + .../dataviz-srv/templates/serviceaccount.yaml | 12 + charts/dataviz-srv/templates/test.yaml | 39 ++ charts/dataviz-srv/values-dev.yaml | 44 +++ charts/dataviz-srv/values-external-db.yaml | 15 + charts/dataviz-srv/values-production.yaml | 84 ++++ charts/dataviz-srv/values.yaml | 211 ++++++++++ 21 files changed, 1252 insertions(+), 1 deletion(-) create mode 100644 charts/dataviz-srv/.helmignore create mode 100644 charts/dataviz-srv/Chart.yaml create mode 100644 charts/dataviz-srv/README.md create mode 100644 charts/dataviz-srv/templates/_helpers.tpl create mode 100644 charts/dataviz-srv/templates/configmap.yaml create mode 100644 charts/dataviz-srv/templates/deployment.yaml create mode 100644 charts/dataviz-srv/templates/external-db-secret.yaml create mode 100644 charts/dataviz-srv/templates/hpa.yaml create mode 100644 charts/dataviz-srv/templates/ingress.yaml create mode 100644 charts/dataviz-srv/templates/job-migration.yaml create mode 100644 charts/dataviz-srv/templates/networkpolicy.yaml create mode 100644 charts/dataviz-srv/templates/pdb.yaml create mode 100644 charts/dataviz-srv/templates/secret.yaml create mode 100644 charts/dataviz-srv/templates/service.yaml create mode 100644 charts/dataviz-srv/templates/serviceaccount.yaml create mode 100644 charts/dataviz-srv/templates/test.yaml create mode 100644 charts/dataviz-srv/values-dev.yaml create mode 100644 charts/dataviz-srv/values-external-db.yaml create mode 100644 charts/dataviz-srv/values-production.yaml create mode 100644 charts/dataviz-srv/values.yaml diff --git a/.gitignore b/.gitignore index c07cce3..286e276 100644 --- a/.gitignore +++ b/.gitignore @@ -160,4 +160,6 @@ bun.lockb.bak # Kubernetes/Helm helm/ k8s/ -*.yaml.backup \ No newline at end of file +*.yaml.backup +charts/dataviz-srv/charts +charts/dataviz-srv/Chart.lock \ No newline at end of file diff --git a/charts/dataviz-srv/.helmignore b/charts/dataviz-srv/.helmignore new file mode 100644 index 0000000..19fa4be --- /dev/null +++ b/charts/dataviz-srv/.helmignore @@ -0,0 +1,25 @@ +# Development files +values-dev.yaml +values-staging.yaml +README.md + +# Git files +.git/ +.gitignore + +# IDE files +.vscode/ +.idea/ + +# Backup files +*.bak +*.tmp +*~ + +# OS files +.DS_Store +Thumbs.db 
+ +# Test files +test/ +tests/ \ No newline at end of file diff --git a/charts/dataviz-srv/Chart.yaml b/charts/dataviz-srv/Chart.yaml new file mode 100644 index 0000000..9e6bcfc --- /dev/null +++ b/charts/dataviz-srv/Chart.yaml @@ -0,0 +1,18 @@ +apiVersion: v2 +name: dataviz-srv +description: DataViz Dashboard Service Helm Chart +type: application +version: 0.1.0 +appVersion: "1.0.0" +keywords: + - dataviz + - dashboard + - analytics +maintainers: + - name: teamdigitale + +dependencies: + - name: postgresql + version: "^16.0.0" + repository: https://charts.bitnami.com/bitnami + condition: postgresql.enabled \ No newline at end of file diff --git a/charts/dataviz-srv/README.md b/charts/dataviz-srv/README.md new file mode 100644 index 0000000..4f01cf4 --- /dev/null +++ b/charts/dataviz-srv/README.md @@ -0,0 +1,363 @@ +# DataViz Service Helm Chart + +Production-ready Helm chart for deploying DataViz dashboard service on Kubernetes with PostgreSQL, health checks, and auto-scaling. + +## Architecture + +```mermaid +flowchart TD + subgraph "Kubernetes Cluster" + subgraph "Ingress" + I[nginx-ingress] + end + + subgraph "DataViz Namespace" + subgraph "Application" + D[dataviz-srv deployment] + S[dataviz-srv service] + HPA[HorizontalPodAutoscaler] + end + + subgraph "Database" + PG[(PostgreSQL)] + EXT[(External Azure DB)] + end + + subgraph "Jobs" + M[migration-job] + T[test-pod] + end + + subgraph "Security" + NP[NetworkPolicy] + SA[ServiceAccount] + end + end + + subgraph "Monitoring" + SM[ServiceMonitor] + PROM[Prometheus] + end + end + + I --> S + S --> D + D --> PG + D -.-> EXT + HPA --> D + M --> PG + M -.-> EXT + SM --> PROM + + classDef app fill:#e1f5fe + classDef db fill:#f3e5f5 + classDef security fill:#fff3e0 + + class D,S,HPA app + class PG,EXT db + class NP,SA security +``` + +## Quick Start + +```bash +# Add dependencies +helm repo add bitnami https://charts.bitnami.com/bitnami +helm repo update + +# Install with internal PostgreSQL +helm upgrade --install dataviz-srv ./charts/dataviz-srv \ + --namespace dataviz \ + --create-namespace + +# Test deployment +helm test dataviz-srv -n dataviz +``` + +## Chart Structure + +``` +charts/dataviz-srv/ +β”œβ”€β”€ Chart.yaml # Chart metadata +β”œβ”€β”€ Chart.lock # Dependencies lock +β”œβ”€β”€ .helmignore # Excluded files +β”œβ”€β”€ values.yaml # Default configuration +β”œβ”€β”€ values-dev.yaml # Development overrides +β”œβ”€β”€ values-production.yaml # Production overrides +β”œβ”€β”€ values-external-db.yaml # External database config +β”œβ”€β”€ charts/ +β”‚ └── postgresql-13.4.4.tgz # PostgreSQL dependency +└── templates/ + β”œβ”€β”€ _helpers.tpl # Template helpers + β”œβ”€β”€ configmap.yaml # Environment configuration + β”œβ”€β”€ deployment.yaml # Main application + β”œβ”€β”€ service.yaml # Service definition + β”œβ”€β”€ ingress.yaml # External access + β”œβ”€β”€ secret.yaml # Application secrets + β”œβ”€β”€ serviceaccount.yaml # RBAC service account + β”œβ”€β”€ hpa.yaml # Horizontal Pod Autoscaler + β”œβ”€β”€ pdb.yaml # Pod Disruption Budget + β”œβ”€β”€ networkpolicy.yaml # Network security + β”œβ”€β”€ job-migration.yaml # Database migration + β”œβ”€β”€ external-db-secret.yaml # External DB credentials + └── test.yaml # Post-install tests +``` + +## Configuration + +### Database Options + +#### Internal PostgreSQL (Default) +```yaml +postgresql: + enabled: true + auth: + username: dataviz + database: dataviz +``` + +#### External Azure Database +```yaml +postgresql: + enabled: false + +externalDatabase: + host: 
my-server.postgres.database.azure.com + username: dataviz_user@my-server + database: dataviz + existingSecret: azure-postgres-credentials +``` + +### Environment Configuration + +| Environment | File | Description | +|-------------|------|-------------| +| Development | `values-dev.yaml` | 1 replica, debug logging | +| Production | `values-production.yaml` | 3+ replicas, external DB | +| External DB | `values-external-db.yaml` | Azure PostgreSQL config | + +### Health Checks + +```mermaid +sequenceDiagram + participant K as Kubernetes + participant A as App Container + participant DB as Database + + Note over K,A: Startup Sequence + K->>A: startupProbe /api/startup + A->>DB: Check tables exist + DB-->>A: Tables ready + A-->>K: 200 OK + + Note over K,A: Runtime Monitoring + loop Every 10s + K->>A: livenessProbe /api/health + A-->>K: 200 OK (basic health) + end + + loop Every 5s + K->>A: readinessProbe /api/ready + A->>DB: Connection test + DB-->>A: Connected + A-->>K: 200 OK (ready for traffic) + end +``` + +## Deployment Scenarios + +### Development +```bash +helm upgrade --install dataviz-dev ./charts/dataviz-srv \ + --namespace dataviz-dev \ + --create-namespace \ + --values charts/dataviz-srv/values-dev.yaml +``` + +### Production with External Database +```bash +# Create secrets first +kubectl create secret generic azure-postgres-credentials \ + --from-literal=password=your-db-password \ + -n dataviz + +kubectl create secret generic dataviz-secrets \ + --from-literal=JWT_SECRET=your-jwt-secret \ + --from-literal=RESEND_API_KEY=your-resend-key \ + --from-literal=OPENAI_API_KEY=your-openai-key \ + -n dataviz + +# Deploy +helm upgrade --install dataviz-prod ./charts/dataviz-srv \ + --namespace dataviz \ + --create-namespace \ + --values charts/dataviz-srv/values-production.yaml \ + --values charts/dataviz-srv/values-external-db.yaml +``` + +## Monitoring & Observability + +### Prometheus Integration +```yaml +serviceMonitor: + enabled: true + namespace: monitoring +``` + +### Available Metrics +- `dataviz_uptime_seconds` - Application uptime +- `dataviz_memory_usage_bytes` - Memory consumption +- `dataviz_database_status` - Database connectivity +- `dataviz_users_total` - Total users count +- `dataviz_charts_total` - Total charts count + +### Logging +Structured JSON logging with configurable levels: +```yaml +env: + LOG_LEVEL: info # debug, info, warn, error +``` + +## Security + +### Pod Security Context +```yaml +podSecurityContext: + runAsNonRoot: true + runAsUser: 1000 + fsGroup: 2000 + +securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: [ALL] + readOnlyRootFilesystem: false +``` + +### Network Policies +```yaml +networkPolicy: + enabled: true + ingress: + from: + - namespaceSelector: + matchLabels: + name: ingress-nginx + egress: + to: + - {} # Configurable egress rules +``` + +## Scaling + +### Horizontal Pod Autoscaler +```yaml +autoscaling: + enabled: true + minReplicas: 2 + maxReplicas: 10 + targetCPUUtilizationPercentage: 70 + targetMemoryUtilizationPercentage: 80 +``` + +### Resource Requests/Limits +| Environment | CPU Request | Memory Request | CPU Limit | Memory Limit | +|-------------|-------------|----------------|-----------|--------------| +| Development | 50m | 128Mi | 200m | 256Mi | +| Production | 300m | 512Mi | 1000m | 1Gi | + +## Troubleshooting + +### Common Issues + +**Migration Job Fails** +```bash +kubectl logs job/dataviz-srv-migration -n dataviz +# Check database connectivity and credentials +``` + +**App Not Ready** +```bash +kubectl 
logs deployment/dataviz-srv -n dataviz +curl -f http://localhost:3003/api/ready # via port-forward +``` + +**Health Check Failures** +```bash +kubectl describe pod -n dataviz +# Check health endpoint responses +``` + +### Useful Commands +```bash +# Check deployment status +helm status dataviz-srv -n dataviz + +# View application logs +kubectl logs -f deployment/dataviz-srv -n dataviz + +# Run tests +helm test dataviz-srv -n dataviz + +# Port forward for local testing +kubectl port-forward service/dataviz-srv 3003:3003 -n dataviz + +# Check HPA status +kubectl get hpa -n dataviz + +# View network policies +kubectl get networkpolicy -n dataviz +``` + +## Backup & Recovery + +### Database Backup +```yaml +postgresql: + backup: + enabled: true + schedule: "0 2 * * *" # Daily at 2 AM +``` + +### Manual Backup +```bash +kubectl exec -it statefulset/dataviz-srv-postgresql -n dataviz -- \ + pg_dump -U dataviz dataviz > backup-$(date +%Y%m%d).sql +``` + +## Upgrading + +### Application Updates +```bash +helm upgrade dataviz-srv ./charts/dataviz-srv \ + --namespace dataviz \ + --set image.tag=v1.2.0 +``` + +### Chart Dependencies +```bash +helm dependency update charts/dataviz-srv +``` + +## Values Reference + +| Parameter | Description | Default | +|-----------|-------------|---------| +| `image.repository` | Container image repository | `ghcr.io/teamdigitale/dataviz-srv` | +| `image.tag` | Container image tag | `main` | +| `replicaCount` | Number of replicas | `2` | +| `postgresql.enabled` | Enable internal PostgreSQL | `true` | +| `externalDatabase.host` | External database host | `""` | +| `ingress.enabled` | Enable ingress | `true` | +| `autoscaling.enabled` | Enable HPA | `true` | +| `networkPolicy.enabled` | Enable network policies | `true` | +| `serviceMonitor.enabled` | Enable Prometheus monitoring | `false` | + +For complete values reference, see [values.yaml](values.yaml). + +## Contributing + +1. Test changes with `helm template` and `helm lint` +2. Update version in `Chart.yaml` +3. Run `helm dependency update` if dependencies changed +4. Commit with conventional commit format \ No newline at end of file diff --git a/charts/dataviz-srv/templates/_helpers.tpl b/charts/dataviz-srv/templates/_helpers.tpl new file mode 100644 index 0000000..db6da8a --- /dev/null +++ b/charts/dataviz-srv/templates/_helpers.tpl @@ -0,0 +1,71 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "dataviz-srv.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +*/}} +{{- define "dataviz-srv.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "dataviz-srv.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "dataviz-srv.labels" -}} +helm.sh/chart: {{ include "dataviz-srv.chart" . }} +{{ include "dataviz-srv.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "dataviz-srv.selectorLabels" -}} +app.kubernetes.io/name: {{ include "dataviz-srv.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "dataviz-srv.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "dataviz-srv.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} + +{{/* +Database connection string +*/}} +{{- define "dataviz-srv.databaseUrl" -}} +{{- if .Values.postgresql.enabled }} +{{- printf "postgresql://%s:$(POSTGRES_PASSWORD)@%s-postgresql:5432/%s" .Values.postgresql.auth.username (include "dataviz-srv.fullname" .) .Values.postgresql.auth.database }} +{{- else }} +{{- printf "postgresql://%s:$(POSTGRES_PASSWORD)@%s:%v/%s" .Values.externalDatabase.username .Values.externalDatabase.host .Values.externalDatabase.port .Values.externalDatabase.database }} +{{- end }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/configmap.yaml b/charts/dataviz-srv/templates/configmap.yaml new file mode 100644 index 0000000..27bf0aa --- /dev/null +++ b/charts/dataviz-srv/templates/configmap.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dataviz-srv.fullname" . }}-config + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +data: + NODE_ENV: {{ .Values.env.NODE_ENV | quote }} + PORT: {{ .Values.env.PORT | quote }} + LOG_LEVEL: {{ .Values.env.LOG_LEVEL | quote }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/deployment.yaml b/charts/dataviz-srv/templates/deployment.yaml new file mode 100644 index 0000000..8d25c80 --- /dev/null +++ b/charts/dataviz-srv/templates/deployment.yaml @@ -0,0 +1,108 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +spec: + {{- if not .Values.autoscaling.enabled }} + replicas: {{ .Values.replicaCount }} + {{- end }} + selector: + matchLabels: + {{- include "dataviz-srv.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "dataviz-srv.selectorLabels" . | nindent 8 }} + {{- with .Values.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- with .Values.image.pullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "dataviz-srv.serviceAccountName" . 
}} + {{- if .Values.podSecurityContext.enabled }} + securityContext: + {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + containers: + - name: {{ .Chart.Name }} + {{- if .Values.securityContext.enabled }} + securityContext: + {{- omit .Values.securityContext "enabled" | toYaml | nindent 12 }} + {{- end }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: http + containerPort: {{ .Values.service.targetPort }} + protocol: TCP + env: + - name: NODE_ENV + value: {{ .Values.env.NODE_ENV }} + - name: PORT + value: {{ .Values.env.PORT | quote }} + - name: LOG_LEVEL + value: {{ .Values.env.LOG_LEVEL }} + - name: DATABASE_URL + value: {{ include "dataviz-srv.databaseUrl" . | quote }} + - name: POSTGRES_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ include "dataviz-srv.fullname" . }}-postgresql + key: {{ .Values.postgresql.auth.secretKeys.userPasswordKey }} + {{- else }} + name: {{ .Values.externalDatabase.existingSecret | default (printf "%s-db" (include "dataviz-srv.fullname" .)) }} + key: {{ .Values.externalDatabase.existingSecretPasswordKey }} + {{- end }} + - name: JWT_SECRET + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.existingSecret | default (printf "%s-secret" (include "dataviz-srv.fullname" .)) }} + key: JWT_SECRET + - name: RESEND_API_KEY + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.existingSecret | default (printf "%s-secret" (include "dataviz-srv.fullname" .)) }} + key: RESEND_API_KEY + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: {{ .Values.secrets.existingSecret | default (printf "%s-secret" (include "dataviz-srv.fullname" .)) }} + key: OPENAI_API_KEY + {{- if .Values.livenessProbe.enabled }} + livenessProbe: + {{- omit .Values.livenessProbe "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.readinessProbe.enabled }} + readinessProbe: + {{- omit .Values.readinessProbe "enabled" | toYaml | nindent 12 }} + {{- end }} + {{- if .Values.startupProbe.enabled }} + startupProbe: + {{- omit .Values.startupProbe "enabled" | toYaml | nindent 12 }} + {{- end }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/external-db-secret.yaml b/charts/dataviz-srv/templates/external-db-secret.yaml new file mode 100644 index 0000000..8983bf1 --- /dev/null +++ b/charts/dataviz-srv/templates/external-db-secret.yaml @@ -0,0 +1,11 @@ +{{- if and (not .Values.postgresql.enabled) (not .Values.externalDatabase.existingSecret) }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dataviz-srv.fullname" . }}-db + labels: + {{- include "dataviz-srv.labels" . 
| nindent 4 }} +type: Opaque +data: + {{ .Values.externalDatabase.existingSecretPasswordKey }}: {{ required "externalDatabase password is required" .Values.externalDatabase.password | b64enc }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/hpa.yaml b/charts/dataviz-srv/templates/hpa.yaml new file mode 100644 index 0000000..64cc5d0 --- /dev/null +++ b/charts/dataviz-srv/templates/hpa.yaml @@ -0,0 +1,36 @@ +{{- if .Values.autoscaling.enabled }} +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ include "dataviz-srv.fullname" . }} + minReplicas: {{ .Values.autoscaling.minReplicas }} + maxReplicas: {{ .Values.autoscaling.maxReplicas }} + metrics: + {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} + {{- end }} + {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} + - type: Resource + resource: + name: memory + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} + {{- end }} + {{- if .Values.autoscaling.behavior }} + behavior: + {{- toYaml .Values.autoscaling.behavior | nindent 4 }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/ingress.yaml b/charts/dataviz-srv/templates/ingress.yaml new file mode 100644 index 0000000..55fb339 --- /dev/null +++ b/charts/dataviz-srv/templates/ingress.yaml @@ -0,0 +1,41 @@ +{{- if .Values.ingress.enabled -}} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- if .Values.ingress.className }} + ingressClassName: {{ .Values.ingress.className }} + {{- end }} + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .Values.ingress.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + pathType: {{ .pathType }} + backend: + service: + name: {{ include "dataviz-srv.fullname" $ }} + port: + number: {{ $.Values.service.port }} + {{- end }} + {{- end }} +{{- end }} diff --git a/charts/dataviz-srv/templates/job-migration.yaml b/charts/dataviz-srv/templates/job-migration.yaml new file mode 100644 index 0000000..916d84a --- /dev/null +++ b/charts/dataviz-srv/templates/job-migration.yaml @@ -0,0 +1,75 @@ +{{- if .Values.migration.enabled }} +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "dataviz-srv.fullname" . }}-migration + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": pre-install,pre-upgrade + "helm.sh/hook-weight": "1" + "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded +spec: + template: + metadata: + name: {{ include "dataviz-srv.fullname" . }}-migration + labels: + {{- include "dataviz-srv.selectorLabels" . 
| nindent 8 }} + job: migration + spec: + restartPolicy: Never + {{- if .Values.podSecurityContext.enabled }} + securityContext: + {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }} + {{- end }} + initContainers: + - name: wait-for-db + image: postgres:15-alpine + command: + - /bin/sh + - -c + - | + {{- if .Values.postgresql.enabled }} + until pg_isready -h {{ include "dataviz-srv.fullname" . }}-postgresql -p 5432 -U {{ .Values.postgresql.auth.username }}; do + {{- else }} + until pg_isready -h {{ .Values.externalDatabase.host }} -p {{ .Values.externalDatabase.port }} -U {{ .Values.externalDatabase.username }}; do + {{- end }} + echo "Waiting for database..." + sleep 2 + done + echo "Database is ready!" + env: + - name: PGPASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ include "dataviz-srv.fullname" . }}-postgresql + key: {{ .Values.postgresql.auth.secretKeys.userPasswordKey }} + {{- else }} + name: {{ .Values.externalDatabase.existingSecret | default (printf "%s-db" (include "dataviz-srv.fullname" .)) }} + key: {{ .Values.externalDatabase.existingSecretPasswordKey }} + {{- end }} + containers: + - name: migration + image: "{{ .Values.migration.image.repository }}:{{ .Values.migration.image.tag | default .Chart.AppVersion }}" + command: ["npx", "prisma", "migrate", "deploy"] + env: + - name: DATABASE_URL + value: {{ include "dataviz-srv.databaseUrl" . | quote }} + - name: POSTGRES_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ include "dataviz-srv.fullname" . }}-postgresql + key: {{ .Values.postgresql.auth.secretKeys.userPasswordKey }} + {{- else }} + name: {{ .Values.externalDatabase.existingSecret | default (printf "%s-db" (include "dataviz-srv.fullname" .)) }} + key: {{ .Values.externalDatabase.existingSecretPasswordKey }} + {{- end }} + resources: + {{- toYaml .Values.migration.resources | nindent 12 }} + {{- if .Values.securityContext.enabled }} + securityContext: + {{- omit .Values.securityContext "enabled" | toYaml | nindent 12 }} + {{- end }} +{{- end }} diff --git a/charts/dataviz-srv/templates/networkpolicy.yaml b/charts/dataviz-srv/templates/networkpolicy.yaml new file mode 100644 index 0000000..edc47e5 --- /dev/null +++ b/charts/dataviz-srv/templates/networkpolicy.yaml @@ -0,0 +1,36 @@ +{{- if .Values.networkPolicy.enabled }} +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +spec: + podSelector: + matchLabels: + {{- include "dataviz-srv.selectorLabels" . | nindent 6 }} + policyTypes: + {{- if .Values.networkPolicy.ingress.enabled }} + - Ingress + {{- end }} + {{- if .Values.networkPolicy.egress.enabled }} + - Egress + {{- end }} + {{- if .Values.networkPolicy.ingress.enabled }} + ingress: + - from: + {{- range .Values.networkPolicy.ingress.from }} + - {{- toYaml . | nindent 10 }} + {{- end }} + ports: + - protocol: TCP + port: {{ .Values.service.targetPort }} + {{- end }} + {{- if .Values.networkPolicy.egress.enabled }} + egress: + {{- range .Values.networkPolicy.egress.to }} + - to: + - {{- toYaml . 
| nindent 10 }} + {{- end }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/pdb.yaml b/charts/dataviz-srv/templates/pdb.yaml new file mode 100644 index 0000000..720e710 --- /dev/null +++ b/charts/dataviz-srv/templates/pdb.yaml @@ -0,0 +1,18 @@ +{{- if .Values.podDisruptionBudget.enabled }} +apiVersion: policy/v1 +kind: PodDisruptionBudget +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +spec: + {{- if .Values.podDisruptionBudget.minAvailable }} + minAvailable: {{ .Values.podDisruptionBudget.minAvailable }} + {{- end }} + {{- if .Values.podDisruptionBudget.maxUnavailable }} + maxUnavailable: {{ .Values.podDisruptionBudget.maxUnavailable }} + {{- end }} + selector: + matchLabels: + {{- include "dataviz-srv.selectorLabels" . | nindent 6 }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/secret.yaml b/charts/dataviz-srv/templates/secret.yaml new file mode 100644 index 0000000..a75b41e --- /dev/null +++ b/charts/dataviz-srv/templates/secret.yaml @@ -0,0 +1,13 @@ +{{- if not .Values.secrets.existingSecret }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dataviz-srv.fullname" . }}-secret + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} +type: Opaque +data: + JWT_SECRET: {{ .Values.secrets.JWT_SECRET | default (randAlphaNum 32) | b64enc }} + RESEND_API_KEY: {{ .Values.secrets.RESEND_API_KEY | b64enc }} + OPENAI_API_KEY: {{ .Values.secrets.OPENAI_API_KEY | b64enc }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/service.yaml b/charts/dataviz-srv/templates/service.yaml new file mode 100644 index 0000000..c9c677b --- /dev/null +++ b/charts/dataviz-srv/templates/service.yaml @@ -0,0 +1,19 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "dataviz-srv.fullname" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} + {{- with .Values.service.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: {{ .Values.service.targetPort }} + protocol: TCP + name: http + selector: + {{- include "dataviz-srv.selectorLabels" . | nindent 4 }} diff --git a/charts/dataviz-srv/templates/serviceaccount.yaml b/charts/dataviz-srv/templates/serviceaccount.yaml new file mode 100644 index 0000000..9051c51 --- /dev/null +++ b/charts/dataviz-srv/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "dataviz-srv.serviceAccountName" . }} + labels: + {{- include "dataviz-srv.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/charts/dataviz-srv/templates/test.yaml b/charts/dataviz-srv/templates/test.yaml new file mode 100644 index 0000000..f98e63d --- /dev/null +++ b/charts/dataviz-srv/templates/test.yaml @@ -0,0 +1,39 @@ +apiVersion: v1 +kind: Pod +metadata: + name: {{ include "dataviz-srv.fullname" . }}-test + labels: + {{- include "dataviz-srv.labels" . 
| nindent 4 }} + annotations: + "helm.sh/hook": test + "helm.sh/hook-weight": "1" + "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded +spec: + restartPolicy: Never + containers: + - name: test + image: curlimages/curl:latest + command: + - /bin/sh + - -c + - | + set -e + echo "Testing DataViz service health endpoints..." + + # Test basic health + echo "Testing /api/health..." + curl -f http://{{ include "dataviz-srv.fullname" . }}:{{ .Values.service.port }}/api/health + + # Test readiness + echo "Testing /api/ready..." + curl -f http://{{ include "dataviz-srv.fullname" . }}:{{ .Values.service.port }}/api/ready + + # Test metrics + echo "Testing /api/metrics..." + curl -f http://{{ include "dataviz-srv.fullname" . }}:{{ .Values.service.port }}/api/metrics | head -10 + + # Test root endpoint + echo "Testing root endpoint..." + curl -f http://{{ include "dataviz-srv.fullname" . }}:{{ .Values.service.port }}/ + + echo "All tests passed! βœ…" \ No newline at end of file diff --git a/charts/dataviz-srv/values-dev.yaml b/charts/dataviz-srv/values-dev.yaml new file mode 100644 index 0000000..f7b4eae --- /dev/null +++ b/charts/dataviz-srv/values-dev.yaml @@ -0,0 +1,44 @@ +replicaCount: 1 + +image: + repository: ghcr.io/teamdigitale/dataviz-srv + tag: main + pullPolicy: Always + +env: + NODE_ENV: development + LOG_LEVEL: debug + +resources: + limits: + cpu: 200m + memory: 256Mi + requests: + cpu: 50m + memory: 128Mi + +autoscaling: + enabled: false + +ingress: + enabled: true + annotations: + cert-manager.io/cluster-issuer: letsencrypt-staging + hosts: + - host: dataviz-dev.example.com + paths: + - path: / + pathType: Prefix + tls: + - secretName: dataviz-dev-tls + hosts: + - dataviz-dev.example.com + +postgresql: + enabled: true + primary: + persistence: + size: 5Gi + +networkPolicy: + enabled: false \ No newline at end of file diff --git a/charts/dataviz-srv/values-external-db.yaml b/charts/dataviz-srv/values-external-db.yaml new file mode 100644 index 0000000..d7b0d85 --- /dev/null +++ b/charts/dataviz-srv/values-external-db.yaml @@ -0,0 +1,15 @@ +postgresql: + enabled: false + +externalDatabase: + host: my-postgres-server.postgres.database.azure.com + port: 5432 + username: dataviz_user@my-postgres-server + database: dataviz + existingSecret: azure-postgres-credentials + existingSecretPasswordKey: password + +secrets: + JWT_SECRET: "my-super-secure-jwt-secret" + RESEND_API_KEY: "re_xxxxxxxxxx" + OPENAI_API_KEY: "sk-xxxxxxxxxx" \ No newline at end of file diff --git a/charts/dataviz-srv/values-production.yaml b/charts/dataviz-srv/values-production.yaml new file mode 100644 index 0000000..4380368 --- /dev/null +++ b/charts/dataviz-srv/values-production.yaml @@ -0,0 +1,84 @@ +replicaCount: 3 + +image: + repository: ghcr.io/teamdigitale/dataviz-srv + tag: main + pullPolicy: IfNotPresent + +env: + NODE_ENV: production + LOG_LEVEL: warn + +resources: + limits: + cpu: 1000m + memory: 1Gi + requests: + cpu: 300m + memory: 512Mi + +autoscaling: + enabled: true + minReplicas: 3 + maxReplicas: 15 + targetCPUUtilizationPercentage: 60 + targetMemoryUtilizationPercentage: 70 + +ingress: + enabled: true + annotations: + nginx.ingress.kubernetes.io/rate-limit: "100" + nginx.ingress.kubernetes.io/rate-limit-window: "1m" + hosts: + - host: dataviz.example.com + paths: + - path: / + pathType: Prefix + tls: + - secretName: dataviz-prod-tls + hosts: + - dataviz.example.com + +# Use external managed database in production +postgresql: + enabled: false + +externalDatabase: + host: postgres.example.com 
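+  # Placeholder managed-PostgreSQL host; the real password is not set in this
+  # file but read from the existingSecret referenced below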
+ port: 5432 + username: dataviz + database: dataviz_prod + existingSecret: dataviz-db-secret + existingSecretPasswordKey: password + +secrets: + existingSecret: dataviz-secrets + +serviceMonitor: + enabled: true + namespace: monitoring + +networkPolicy: + enabled: true + ingress: + enabled: true + from: + - namespaceSelector: + matchLabels: + name: ingress-nginx + - namespaceSelector: + matchLabels: + name: monitoring + podSelector: + matchLabels: + app.kubernetes.io/name: prometheus + egress: + enabled: true + to: + - namespaceSelector: + matchLabels: + name: kube-system + - podSelector: {} + namespaceSelector: + matchLabels: + name: cert-manager diff --git a/charts/dataviz-srv/values.yaml b/charts/dataviz-srv/values.yaml new file mode 100644 index 0000000..412742a --- /dev/null +++ b/charts/dataviz-srv/values.yaml @@ -0,0 +1,211 @@ +replicaCount: 2 + +image: + repository: ghcr.io/teamdigitale/dataviz-srv + tag: main + pullPolicy: IfNotPresent + pullSecrets: [] + +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + create: true + annotations: {} + name: "" + +podAnnotations: {} +podLabels: {} + +podSecurityContext: + enabled: true + fsGroup: 2000 + runAsNonRoot: true + runAsUser: 1000 + +securityContext: + enabled: true + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: false + runAsNonRoot: true + runAsUser: 1000 + +service: + type: ClusterIP + port: 3003 + targetPort: 3003 + annotations: {} + +ingress: + enabled: true + className: nginx + annotations: + nginx.ingress.kubernetes.io/ssl-redirect: "true" + nginx.ingress.kubernetes.io/proxy-body-size: "10m" + cert-manager.io/cluster-issuer: letsencrypt-prod + hosts: + - host: dataviz.example.com + paths: + - path: / + pathType: Prefix + tls: + - secretName: dataviz-tls + hosts: + - dataviz.example.com + +resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 100m + memory: 128Mi + +autoscaling: + enabled: true + minReplicas: 2 + maxReplicas: 10 + targetCPUUtilizationPercentage: 70 + targetMemoryUtilizationPercentage: 80 + behavior: + scaleDown: + stabilizationWindowSeconds: 300 + policies: + - type: Percent + value: 50 + periodSeconds: 60 + scaleUp: + stabilizationWindowSeconds: 60 + policies: + - type: Percent + value: 100 + periodSeconds: 60 + +livenessProbe: + enabled: true + httpGet: + path: /api/health + port: http + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + +readinessProbe: + enabled: true + httpGet: + path: /api/ready + port: http + initialDelaySeconds: 5 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + +startupProbe: + enabled: true + httpGet: + path: /api/startup + port: http + initialDelaySeconds: 10 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 30 + +nodeSelector: {} +tolerations: [] +affinity: {} + +podDisruptionBudget: + enabled: true + minAvailable: 1 + +networkPolicy: + enabled: true + ingress: + enabled: true + from: + - namespaceSelector: + matchLabels: + name: ingress-nginx + - podSelector: + matchLabels: + app.kubernetes.io/name: prometheus + egress: + enabled: true + to: + - {} # Allow all egress by default + +env: + NODE_ENV: production + PORT: "3003" + LOG_LEVEL: info + +# Database configuration +postgresql: + enabled: true + auth: + username: dataviz + database: dataviz + existingSecret: "" + secretKeys: + adminPasswordKey: postgres-password + userPasswordKey: password + primary: + persistence: + enabled: true + storageClass: "" + size: 20Gi + resources: + requests: + cpu: 100m + 
memory: 128Mi + limits: + cpu: 250m + memory: 256Mi + configuration: | + max_connections = 100 + shared_buffers = 32MB + effective_cache_size = 128MB + backup: + enabled: false + +# External database (when postgresql.enabled = false) +externalDatabase: + host: "" + port: 5432 + username: dataviz + database: dataviz + existingSecret: "" + existingSecretPasswordKey: password + +# Application secrets +secrets: + JWT_SECRET: "" + RESEND_API_KEY: "" + OPENAI_API_KEY: "" + existingSecret: "" + +# Service Monitor for Prometheus +serviceMonitor: + enabled: false + namespace: "" + interval: 30s + scrapeTimeout: 10s + path: /api/metrics + +# Migration job +migration: + enabled: true + image: + repository: ghcr.io/teamdigitale/dataviz-srv + tag: main + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + cpu: 100m + memory: 128Mi \ No newline at end of file From b020e397adda67cb81107dd32069453dbb470575 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Sun, 15 Jun 2025 16:08:41 +0200 Subject: [PATCH 04/13] docs: complete project README with architecture diagrams - Add comprehensive documentation with Mermaid diagrams - Document API workflows, database schema, and architecture - Include local development setup (containerized and non-containerized) - Add Fly.io deployment instructions - Document data format requirements with examples - Add monitoring, troubleshooting, and API usage examples --- README.md | 488 +++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 463 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 215809e..db3c81f 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,499 @@ -# Dataviz chart server +# DataViz Service -## Install deps +AI-powered dashboard and chart service with real-time data visualization, user authentication, and intelligent chart suggestions. 
-To install dependencies: +## Architecture Overview + +```mermaid +flowchart TB + subgraph "Client Layer" + UI[Frontend App] + API[API Clients] + end + + subgraph "DataViz Service" + AUTH[Authentication] + CHARTS[Charts API] + DASH[Dashboards API] + AI[AI Suggestions] + HEALTH[Health Checks] + end + + subgraph "Data Layer" + DB[(PostgreSQL)] + REMOTE[Remote Data Sources] + end + + subgraph "External Services" + OPENAI[OpenAI GPT-4] + EMAIL[Resend Email] + end + + UI --> AUTH + UI --> CHARTS + UI --> DASH + API --> CHARTS + + CHARTS --> AI + AI --> OPENAI + AUTH --> EMAIL + + CHARTS --> DB + DASH --> DB + AUTH --> DB + + CHARTS -.-> REMOTE + + classDef api fill:#e3f2fd + classDef data fill:#f3e5f5 + classDef external fill:#fff3e0 + + class AUTH,CHARTS,DASH,AI api + class DB,REMOTE data + class OPENAI,EMAIL external +``` + +## Database Schema + +```mermaid +erDiagram + User { + string id PK + string email UK + string password + timestamp createdAt + timestamp updatedAt + } + + Chart { + string id PK + string name + string description + string chart + json config + json data + boolean publish + string remoteUrl + boolean isRemote + string preview + string userId FK + timestamp createdAt + timestamp updatedAt + } + + Dashboard { + string id PK + string name + string description + boolean publish + string userId FK + timestamp createdAt + timestamp updatedAt + } + + Slot { + string dashboardId PK,FK + string chartId PK,FK + json settings + timestamp createdAt + timestamp updatedAt + } + + Codes { + string id PK + string code + string userId FK + int expire + timestamp createdAt + timestamp updatedAt + } + + User ||--o{ Chart : creates + User ||--o{ Dashboard : owns + Dashboard ||--o{ Slot : contains + Chart ||--o{ Slot : placed_in + User ||--o{ Codes : has_activation_codes +``` + +## API Workflow + +```mermaid +sequenceDiagram + participant U as User + participant A as Auth API + participant C as Charts API + participant D as Dashboard API + participant AI as AI Service + participant DB as Database + participant EXT as External APIs + + Note over U,EXT: User Registration & Login + U->>A: POST /auth/register + A->>DB: Create user + A->>EXT: Send activation email + A-->>U: Registration successful + + U->>A: POST /auth/login + A->>DB: Verify credentials + A-->>U: Set auth cookie + + Note over U,EXT: Chart Creation with AI + U->>C: POST /charts (with data) + C->>AI: POST /hints (data analysis) + AI->>EXT: Query OpenAI for suggestions + AI-->>C: Chart recommendations + C->>DB: Save chart + C-->>U: Chart created with AI suggestions + + Note over U,EXT: Dashboard Management + U->>D: POST /dashboards + D->>DB: Create dashboard + D-->>U: Dashboard created + + U->>D: PUT /dashboards/:id/slots + D->>DB: Update chart slots + D-->>U: Dashboard layout saved + + Note over U,EXT: Public Access + U->>C: GET /charts/show/:id + C->>DB: Check if published + alt Chart is remote + C->>EXT: Fetch latest data + C->>DB: Update chart data + end + C-->>U: Chart data +``` + +## Quick Start + +### Local Development (Non-Containerized) ```bash +# Clone repository +git clone +cd dataviz-srv + +# Install dependencies bun install +# Install Prisma globally +bun install -g prisma@5.19.0 + +# Setup environment +cp sample.env .env +# Edit .env with your configuration: +# DATABASE_URL=postgresql://user:password@localhost:5432/dataviz +# JWT_SECRET=your-secret-key +# OPENAI_API_KEY=sk-your-key (optional) +# RESEND_API_KEY=re-your-key (optional) + +# Setup PostgreSQL database +# Option 1: Local PostgreSQL +createdb dataviz +# Option 
2: Use existing database URL in .env + +# Generate Prisma client and apply schema +npx prisma db push + +# Seed test data (optional) +bun run seeds/seed-users.ts + +# Start development server +bun run dev +# Server runs on http://localhost:3003 ``` -## DB Setup +### Development with Docker -This project uses PostgreSQL. _Prisma_ (v5.19.0) is the ORM library used to query and store data into the db . So, install _prisma_ globally: +```bash +# Start with Docker Compose +docker-compose up --build +# Run tests +curl http://localhost:3003/api/health ``` -bun i -g prisma@5.19.0 + +## API Endpoints + +### Authentication +- `POST /auth/register` - User registration with email activation +- `POST /auth/login` - Login with JWT cookie +- `GET /auth/user` - Get current user info +- `GET /auth/logout` - Logout and clear cookie + +### Charts +- `GET /charts` - List user's charts +- `POST /charts` - Create new chart +- `GET /charts/:id` - Get chart details (auth required) +- `GET /charts/show/:id` - Public chart view (if published) +- `PUT /charts/:id` - Update chart +- `DELETE /charts/:id` - Delete chart +- `POST /charts/publish/:id` - Toggle chart visibility + +### Dashboards +- `GET /dashboards` - List user's dashboards +- `POST /dashboards` - Create dashboard +- `GET /dashboards/:id` - Get dashboard details +- `PUT /dashboards/:id` - Update dashboard +- `PUT /dashboards/:id/slots` - Update chart layout +- `DELETE /dashboards/:id` - Delete dashboard + +### AI Suggestions +- `POST /hints` - Get AI-powered chart suggestions from data + +### Health & Monitoring +- `GET /api/health` - Application health status +- `GET /api/ready` - Readiness probe (checks database) +- `GET /api/startup` - Startup probe (checks initialization) +- `GET /api/metrics` - Prometheus metrics + +## Features + +### πŸ€– AI-Powered Chart Suggestions +- Upload CSV/JSON data to get intelligent visualization recommendations +- OpenAI GPT-4 analyzes data structure and suggests optimal chart types +- Automatic data transformation suggestions (grouping, aggregation, pivoting) + +## Data Format + +DataViz expects data in matrix format for charting: + +### Sample Data Structure +```json +[ + ["_", "2024-09-18", "2024-09-19", "2024-09-20", "2024-09-21", "2024-09-22"], + ["Visits", 4319, 4405, 3821, 485, 251], + ["Uniq Visitors", 3292, 3303, 2891, 407, 224], + ["New Visits", 1102, 1109, 920, 177, 108] +] ``` -After that, you have to setup youn db +**Format Rules:** +- First row: category headers (dates, regions, etc.) 
+- First column: series names +- First cell (`[0][0]`): placeholder (`"_"` or `"-"`) +- Data cells: numeric values for visualization + +### Supported Chart Types +- **Bar Charts** - Categorical data comparison +- **Line Charts** - Time series and trends +- **Pie/Donut Charts** - Part-to-whole relationships +- **Geo Charts** - Geographic data visualization +- **Network/Scatter Plots** - Relationships and correlations + +### πŸ—οΈ Dashboard Builder +- Drag-and-drop chart layout with grid system +- Multiple charts per dashboard +- Configurable chart settings and positioning +- Published dashboards for public sharing + +### πŸ” Security Features +- JWT-based authentication with HTTP-only cookies +- Email activation system with PIN codes +- User isolation (users can only access their own data) +- CORS protection and security headers -Run: +### πŸ“ˆ Remote Data Integration +- Charts can pull data from external URLs +- Automatic data refresh for remote sources (daily) +- Cached data with smart update logic +## Configuration + +### Environment Variables + +| Variable | Required | Description | +|----------|----------|-------------| +| `DATABASE_URL` | Yes | PostgreSQL connection string | +| `JWT_SECRET` | Yes | Secret for JWT token signing | +| `NODE_ENV` | No | Environment (development/production) | +| `PORT` | No | Server port (default: 3003) | +| `LOG_LEVEL` | No | Logging level (debug/info/warn/error) | +| `OPENAI_API_KEY` | No | OpenAI API key for AI suggestions | +| `RESEND_API_KEY` | No | Resend API key for emails | +| `SENDER_EMAIL` | No | From email address | +| `HOST` | No | Base URL for email links | +| `DOMAINS` | No | Allowed CORS domains (comma-separated) | + +### Database Setup + +```bash +# Generate Prisma client +bunx prisma generate + +# Apply migrations +bunx prisma migrate deploy + +# Seed test data +bun run seeds/seed-users.ts + +# View database +bunx prisma studio ``` -prisma db push + +## Development Tools + +### Bruno API Collection +Test all endpoints with the included Bruno collection: +```bash +cd bruno/ +# Open with Bruno REST client ``` -It will generate prisma client and generate all the tables in the db. +### Health Monitoring +- Structured JSON logging with configurable levels +- Prometheus metrics for monitoring +- Health checks for Kubernetes deployment +- Request/response logging middleware + +### Docker Support +- `Dockerfile` - Production image +- `Dockerfile.dev` - Development with hot reload +- `docker-compose.yaml` - Full development stack -Then, seed the db +## Deployment +### Local Development +```bash +bun run dev # Hot reload server ``` -bun seeds/seed-users.ts + +### Docker +```bash +docker build -t dataviz-srv . 
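+# DATABASE_URL and JWT_SECRET are required at runtime (see the environment
+# variables table above); one option is to pass them via an env file, e.g.
+#   docker run --env-file .env -p 3003:3003 dataviz-srv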
+docker run -p 3003:3003 dataviz-srv ``` -## Run the app +### Fly.io Deployment -To run: +```bash +# Install Fly CLI +curl -L https://fly.io/install.sh | sh +# Login and create app +flyctl auth login +flyctl launch --no-deploy + +# Set environment variables +flyctl secrets set \ + DATABASE_URL="postgres://user:pass@host:5432/dbname" \ + JWT_SECRET="your-secure-jwt-secret" \ + OPENAI_API_KEY="sk-your-openai-key" \ + RESEND_API_KEY="re-your-resend-key" + +# Deploy application +flyctl deploy + +# Check deployment status +flyctl status +flyctl logs +``` + +**Fly.io Configuration** (`fly.toml`): +- Uses Dockerfile for production build +- Automatic HTTPS with certificates +- Health checks on `/api/health` +- Environment variables via `flyctl secrets` + +### Kubernetes (Helm) ```bash -bun run index.ts -# or -bun run dev +helm upgrade --install dataviz-srv ./charts/dataviz-srv \ + --namespace dataviz \ + --create-namespace ``` -This project was created using `bun init` in bun v1.1.3. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime. +See [charts/dataviz-srv/README.md](charts/dataviz-srv/README.md) for detailed Helm configuration. -# fly.io config +## Project Structure -Docker file generated with +``` +β”œβ”€β”€ lib/ # Core utilities +β”‚ β”œβ”€β”€ ai.ts # OpenAI integration +β”‚ β”œβ”€β”€ db.ts # Database operations +β”‚ β”œβ”€β”€ email.ts # Email templates & sending +β”‚ β”œβ”€β”€ jwt.ts # JWT token management +β”‚ β”œβ”€β”€ logger.ts # Structured logging +β”‚ β”œβ”€β”€ middlewares.ts # Express middleware +β”‚ └── pin.ts # PIN generation +β”œβ”€β”€ routes/ # API endpoints +β”‚ β”œβ”€β”€ auth.ts # Authentication routes +β”‚ β”œβ”€β”€ charts.ts # Chart CRUD operations +β”‚ β”œβ”€β”€ dashboards.ts # Dashboard management +β”‚ β”œβ”€β”€ health.ts # Health check endpoints +β”‚ └── hints.ts # AI suggestions +β”œβ”€β”€ domain/ # Business logic +β”‚ └── dashboard-facade.ts +β”œβ”€β”€ repository/ # Data access layer +β”‚ β”œβ”€β”€ dashboard-repository.ts +β”‚ └── repository-factory.ts +β”œβ”€β”€ prisma/ # Database schema & migrations +β”œβ”€β”€ bruno/ # API testing collection +β”œβ”€β”€ charts/dataviz-srv/ # Helm chart for K8s deployment +└── docker-compose.yaml # Development environment +``` + +## API Examples +### Create Chart with AI Suggestions ```bash - bunx --bun @flydotio/dockerfile@latest -``` +# 1. Upload data and get AI suggestions +curl -X POST http://localhost:3003/hints \ + -H "Cookie: access_token=..." \ + -H "Content-Type: application/json" \ + -d '[["Region","Sales","Profit"],["North",100,20],["South",150,30]]' -then used the `flyctl secrets` command to add envs, see `sample.env` for variables reference. +# 2. Create chart based on suggestion +curl -X POST http://localhost:3003/charts \ + -H "Cookie: access_token=..." \ + -H "Content-Type: application/json" \ + -d '{ + "name": "Sales by Region", + "chart": "bar", + "data": [["Region","Sales"],["North",100],["South",150]], + "config": {"xAxis": "Region", "yAxis": ["Sales"]} + }' +``` +### Dashboard Layout Management ```bash -fly secrets set DATABASE_URL=postgres://example.com/mydb -fly secrets list +# Update dashboard with chart positions +curl -X PUT http://localhost:3003/dashboards/dashboard-id/slots \ + -H "Cookie: access_token=..." 
\ + -H "Content-Type: application/json" \ + -d '{ + "slots": [ + { + "chartId": "chart-1", + "settings": {"x": 0, "y": 0, "w": 2, "h": 1} + }, + { + "chartId": "chart-2", + "settings": {"x": 2, "y": 0, "w": 2, "h": 1} + } + ] + }' ``` + +## Monitoring & Observability + +### Metrics Available +- `dataviz_uptime_seconds` - Service uptime +- `dataviz_memory_usage_bytes` - Memory consumption +- `dataviz_database_status` - DB connection health +- `dataviz_users_total` - Total registered users +- `dataviz_charts_total` - Total charts created +- `dataviz_dashboards_total` - Total dashboards + +### Logging Format +```json +{ + "service": "dataviz-srv", + "level": "info", + "message": "HTTP request", + "method": "GET", + "url": "/charts", + "status": 200, + "duration_ms": 45, + "user_id": "user-123" +} +``` \ No newline at end of file From 9df54c453f114f26b2d61130ee3c07641332b580 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Sun, 15 Jun 2025 16:37:59 +0200 Subject: [PATCH 05/13] fix: restore Dockerfile.app and update CI to use correct Dockerfile - Restore Dockerfile.app (production optimized multi-stage build) - Update GitHub Actions to use Dockerfile.app instead of Dockerfile --- .github/workflows/release.yml | 2 +- Dockerfile | 68 ++++++++++++++++++----------------- 2 files changed, 36 insertions(+), 34 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e7a4ef3..ce1dec6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -48,7 +48,7 @@ jobs: - name: Build and push uses: docker/build-push-action@v6 with: - file: "Dockerfile.app" + file: "Dockerfile" platforms: linux/amd64 context: . push: ${{ github.event_name != 'pull_request' }} diff --git a/Dockerfile b/Dockerfile index f05eb00..9697da5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,45 +1,47 @@ -# syntax = docker/dockerfile:1 +# BASE Stage +FROM oven/bun:1 AS base -# Adjust BUN_VERSION as desired -FROM imbios/bun-node:1-20-slim as base +# setup all global artifacts. why Node? A: https://github.com/oven-sh/bun/issues/4848 +RUN apt update \ + && apt install -y curl -LABEL fly_launch_runtime="Bun/Prisma" +ARG NODE_VERSION=20 +RUN curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o n \ + && bash n $NODE_VERSION \ + && rm n \ + && npm install -g n + -# Bun/Prisma app lives here -WORKDIR /app +# INSTALL Stage -# Set production environment -ENV NODE_ENV="production" +# install dependencies into temp folder. this will cache them and speed up future builds +FROM base AS install +WORKDIR /temp/prod/ +COPY package.json bun.lockb ./ +RUN bun install --frozen-lockfile --production -# Throw-away build stage to reduce size of final image -FROM base as build -# Install packages needed to build node modules -RUN apt-get update -qq && \ - apt-get install --no-install-recommends -y build-essential openssl pkg-config python-is-python3 +# PRERELEASE Stage -# Install node modules -COPY --link bun.lockb package.json ./ -RUN bun install --ci +# copy node_modules from temp folder. then copy all (non-ignored) project files into the image +FROM install AS prerelease -# Generate Prisma Client -COPY --link prisma . -RUN npx prisma generate - -# Copy application code -COPY --link . . +WORKDIR /usr/src/app -# Final stage for app image -FROM base +COPY --from=install /temp/prod/node_modules node_modules +COPY . . 
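+# prisma generate writes the client into node_modules, so it is carried into
+# the release stage together with the node_modules copy below
+# (assumes the prisma/ schema directory is part of the copied build context)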
+RUN npx prisma generate -# Install packages needed for deployment -RUN apt-get update -qq && \ - apt-get install --no-install-recommends -y openssl && \ - rm -rf /var/lib/apt/lists /var/cache/apt/archives +# RELEASE Stage -# Copy built application -COPY --from=build /app /app +FROM base AS release +COPY --from=prerelease /usr/src/app/node_modules ./node_modules +COPY --from=prerelease /usr/src/app/index.ts . +COPY --from=prerelease /usr/src/app/lib ./lib +COPY --from=prerelease /usr/src/app/routes ./routes +COPY --from=prerelease /usr/src/app/package.json . -# Start the server by default, this can be overwritten at runtime -EXPOSE 3003 -CMD [ "bun", "run", "start" ] +# run the app +USER bun +EXPOSE 3003/tcp +CMD ["bun", "run", "index.ts"] From 6aef88d743c1d733e3c93ee5c4a205f254e29a2e Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Tue, 17 Jun 2025 13:27:52 +0200 Subject: [PATCH 06/13] =?UTF-8?q?=F0=9F=A7=AA=20Add=20Helm=20auto-versioni?= =?UTF-8?q?ng=20workflow=20for=20testing,=20lint=20&=20push?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/release.yml | 164 ++++++++++++++++++++++++++++++++-- 1 file changed, 155 insertions(+), 9 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ce1dec6..18f00eb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,24 +1,28 @@ -name: release +name: πŸš€ Release on: push: - branches: [main] + branches: [main, k8s-ready] tags: - "v*" pull_request: - branches: [main] + branches: [main, k8s-ready] permissions: # To push Docker images to GitHub packages: write + # To push Helm charts to GitHub + contents: read jobs: docker-build: + name: 🐳 Build Docker Image runs-on: ubuntu-latest steps: - - name: Checkout + - name: πŸ“¦ Checkout uses: actions/checkout@v4 - - name: Docker meta + + - name: 🏷️ Docker meta id: meta uses: docker/metadata-action@v5 with: @@ -30,22 +34,23 @@ jobs: type=ref,event=branch type=semver,pattern={{version}} type=semver,pattern={{major}}.{{minor}} - - name: Set up QEMU + + - name: πŸ”§ Set up QEMU uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx + - name: πŸ—οΈ Set up Docker Buildx uses: docker/setup-buildx-action@v3 with: platforms: linux/amd64 - - name: Login to GitHub Container Registry + - name: πŸ” Login to GitHub Container Registry uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Build and push + - name: πŸš€ Build and push uses: docker/build-push-action@v6 with: file: "Dockerfile" @@ -54,3 +59,144 @@ jobs: push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + + helm-release: + name: βš“ Helm Chart Release + runs-on: ubuntu-latest + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/k8s-ready') + steps: + - name: πŸ“¦ Checkout + uses: actions/checkout@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + + - name: πŸ” Check for Helm chart changes (excluding values files) + uses: dorny/paths-filter@v3 + id: changes + with: + filters: | + charts: + - 'charts/**' + - '!charts/**/values*.yaml' + - '!charts/**/values*.yml' + + - name: βš“ Install Helm + if: steps.changes.outputs.charts == 'true' + uses: azure/setup-helm@v4 + with: + version: '3.14.0' + + - name: 🏷️ Auto-version Helm chart + if: steps.changes.outputs.charts == 'true' + id: 
version + run: | + cd charts/dataviz-srv + + # Get current version from Chart.yaml + CURRENT_VERSION=$(grep '^version:' Chart.yaml | awk '{print $2}' | tr -d '"') + + # If no version exists, start with 1.0.0 + if [ -z "$CURRENT_VERSION" ] || [ "$CURRENT_VERSION" = "null" ]; then + NEW_VERSION="1.0.0" + else + # Parse version components + IFS='.' read -r major minor patch <<< "$CURRENT_VERSION" + + # Increment patch version + NEW_VERSION="$major.$minor.$((patch + 1))" + fi + + echo "🏷️ Current version: $CURRENT_VERSION" + echo "🏷️ New version: $NEW_VERSION" + + # Update Chart.yaml with new version + sed -i "s/^version:.*/version: \"$NEW_VERSION\"/" Chart.yaml + + # Also update appVersion to match if it exists + if grep -q '^appVersion:' Chart.yaml; then + sed -i "s/^appVersion:.*/appVersion: \"$NEW_VERSION\"/" Chart.yaml + fi + + echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT + + # Show the changes + echo "πŸ“ Updated Chart.yaml:" + cat Chart.yaml | grep -E '^(version|appVersion):' + + - name: πŸ” Helm lint + if: steps.changes.outputs.charts == 'true' + run: | + helm lint charts/dataviz-srv + + - name: βœ… Helm template validation + if: steps.changes.outputs.charts == 'true' + run: | + helm template dataviz-srv charts/dataviz-srv --values charts/dataviz-srv/values.yaml --dry-run + + - name: πŸ“ Commit version bump + if: steps.changes.outputs.charts == 'true' + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add charts/dataviz-srv/Chart.yaml + git commit -m "🏷️ Bump Helm chart version to ${{ steps.version.outputs.new_version }}" + git push + + - name: πŸ“¦ Package Helm chart + if: steps.changes.outputs.charts == 'true' + run: | + helm package charts/dataviz-srv + + - name: πŸ” Login to GitHub Container Registry for Helm + if: steps.changes.outputs.charts == 'true' + run: | + echo ${{ secrets.GITHUB_TOKEN }} | helm registry login ghcr.io -u ${{ github.repository_owner }} --password-stdin + + - name: πŸš€ Push Helm chart + if: steps.changes.outputs.charts == 'true' + run: | + helm push dataviz-srv-${{ steps.version.outputs.new_version }}.tgz oci://ghcr.io/teamdigitale/helm-charts + + - name: 🏷️ Create Git tag + if: steps.changes.outputs.charts == 'true' + run: | + git tag "helm-v${{ steps.version.outputs.new_version }}" + git push origin "helm-v${{ steps.version.outputs.new_version }}" + + helm-validation: + name: πŸ” Helm Validation (PR) + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - name: πŸ“¦ Checkout + uses: actions/checkout@v4 + + - name: πŸ” Check for Helm chart changes + uses: dorny/paths-filter@v3 + id: changes + with: + filters: | + charts: + - 'charts/**' + + - name: βš“ Install Helm + if: steps.changes.outputs.charts == 'true' + uses: azure/setup-helm@v4 + with: + version: '3.14.0' + + - name: πŸ” Helm lint + if: steps.changes.outputs.charts == 'true' + run: | + helm lint charts/dataviz-srv + + - name: βœ… Helm template validation + if: steps.changes.outputs.charts == 'true' + run: | + helm template dataviz-srv charts/dataviz-srv --values charts/dataviz-srv/values.yaml --dry-run + + - name: πŸ“Š Helm chart diff (if applicable) + if: steps.changes.outputs.charts == 'true' + run: | + echo "::notice title=Helm Changes::Helm chart changes detected in this PR. Please review the modifications." 
\ No newline at end of file From 0a499c428906a7c48ca73222fc6a33cd005cde9b Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Tue, 17 Jun 2025 13:34:38 +0200 Subject: [PATCH 07/13] fix: helm update & helm icon --- .github/workflows/release.yml | 12 ++++++++++++ charts/dataviz-srv/Chart.yaml | 1 + 2 files changed, 13 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 18f00eb..9eb4366 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -87,6 +87,18 @@ jobs: with: version: '3.14.0' + - name: πŸ“¦ Update Helm dependencies + if: steps.changes.outputs.charts == 'true' + run: | + cd charts/dataviz-srv + helm dependency update + + - name: πŸ“¦ Update Helm dependencies + if: steps.changes.outputs.charts == 'true' + run: | + cd charts/dataviz-srv + helm dependency update + - name: 🏷️ Auto-version Helm chart if: steps.changes.outputs.charts == 'true' id: version diff --git a/charts/dataviz-srv/Chart.yaml b/charts/dataviz-srv/Chart.yaml index 9e6bcfc..5b54bfa 100644 --- a/charts/dataviz-srv/Chart.yaml +++ b/charts/dataviz-srv/Chart.yaml @@ -4,6 +4,7 @@ description: DataViz Dashboard Service Helm Chart type: application version: 0.1.0 appVersion: "1.0.0" +icon: https://cdn.jsdelivr.net/npm/@mdi/svg@7.4.47/svg/view-dashboard.svg keywords: - dataviz - dashboard From 7bfd87650aa046bfcaa44578c8787e776b4e6272 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Tue, 17 Jun 2025 13:36:46 +0200 Subject: [PATCH 08/13] fix: workflow permission --- .github/workflows/release.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9eb4366..da3eb61 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,9 @@ permissions: # To push Docker images to GitHub packages: write # To push Helm charts to GitHub - contents: read + contents: write + # To push commits and tags + actions: read jobs: docker-build: From ea2fbaeb312e93afa58a64a7a5f6d46295814bc8 Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Tue, 17 Jun 2025 11:37:19 +0000 Subject: [PATCH 09/13] =?UTF-8?q?=F0=9F=8F=B7=EF=B8=8F=20Bump=20Helm=20cha?= =?UTF-8?q?rt=20version=20to=200.1.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- charts/dataviz-srv/Chart.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/charts/dataviz-srv/Chart.yaml b/charts/dataviz-srv/Chart.yaml index 5b54bfa..8ceb401 100644 --- a/charts/dataviz-srv/Chart.yaml +++ b/charts/dataviz-srv/Chart.yaml @@ -2,8 +2,8 @@ apiVersion: v2 name: dataviz-srv description: DataViz Dashboard Service Helm Chart type: application -version: 0.1.0 -appVersion: "1.0.0" +version: "0.1.1" +appVersion: "0.1.1" icon: https://cdn.jsdelivr.net/npm/@mdi/svg@7.4.47/svg/view-dashboard.svg keywords: - dataviz From 573621e88df9a82e9cbc84d98bd28b9f0a901fd7 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Tue, 17 Jun 2025 13:47:56 +0200 Subject: [PATCH 10/13] feat: make appversion only for tags --- .github/workflows/release.yml | 32 +++++++++++++++++++++++++------- charts/dataviz-srv/values.yaml | 4 ++-- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index da3eb61..085368a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -121,15 +121,26 @@ jobs: NEW_VERSION="$major.$minor.$((patch + 1))" fi - echo "🏷️ Current version: $CURRENT_VERSION" - echo 
"🏷️ New version: $NEW_VERSION" + echo "🏷️ Current chart version: $CURRENT_VERSION" + echo "🏷️ New chart version: $NEW_VERSION" - # Update Chart.yaml with new version + # Update Chart.yaml with new chart version sed -i "s/^version:.*/version: \"$NEW_VERSION\"/" Chart.yaml - # Also update appVersion to match if it exists - if grep -q '^appVersion:' Chart.yaml; then - sed -i "s/^appVersion:.*/appVersion: \"$NEW_VERSION\"/" Chart.yaml + # Update appVersion only if this is a tag push (new app release) + if [[ "${GITHUB_REF}" == refs/tags/* ]]; then + # Extract version from tag (e.g., v1.2.3 -> 1.2.3) + APP_VERSION=${GITHUB_REF#refs/tags/} + APP_VERSION=${APP_VERSION#v} # Remove 'v' prefix if present + + echo "πŸš€ This is a tag push, updating appVersion to: $APP_VERSION" + sed -i "s/^appVersion:.*/appVersion: \"$APP_VERSION\"/" Chart.yaml + + echo "app_version=$APP_VERSION" >> $GITHUB_OUTPUT + else + echo "πŸ“ Branch push - keeping current appVersion unchanged" + CURRENT_APP_VERSION=$(grep '^appVersion:' Chart.yaml | awk '{print $2}' | tr -d '"') + echo "app_version=$CURRENT_APP_VERSION" >> $GITHUB_OUTPUT fi echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT @@ -154,7 +165,14 @@ jobs: git config --local user.email "action@github.com" git config --local user.name "GitHub Action" git add charts/dataviz-srv/Chart.yaml - git commit -m "🏷️ Bump Helm chart version to ${{ steps.version.outputs.new_version }}" + + # Create appropriate commit message + if [[ "${GITHUB_REF}" == refs/tags/* ]]; then + git commit -m "🏷️ Bump Helm chart to ${{ steps.version.outputs.new_version }} + app to ${{ steps.version.outputs.app_version }}" + else + git commit -m "🏷️ Bump Helm chart version to ${{ steps.version.outputs.new_version }}" + fi + git push - name: πŸ“¦ Package Helm chart diff --git a/charts/dataviz-srv/values.yaml b/charts/dataviz-srv/values.yaml index 412742a..f16a256 100644 --- a/charts/dataviz-srv/values.yaml +++ b/charts/dataviz-srv/values.yaml @@ -1,8 +1,8 @@ -replicaCount: 2 +replicaCount: 1 image: repository: ghcr.io/teamdigitale/dataviz-srv - tag: main + tag: "" pullPolicy: IfNotPresent pullSecrets: [] From dbd70be3dd351c40fac35d3bae35a72cc0f5a7da Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Tue, 17 Jun 2025 11:48:30 +0000 Subject: [PATCH 11/13] =?UTF-8?q?=F0=9F=8F=B7=EF=B8=8F=20Bump=20Helm=20cha?= =?UTF-8?q?rt=20version=20to=200.1.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- charts/dataviz-srv/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/dataviz-srv/Chart.yaml b/charts/dataviz-srv/Chart.yaml index 8ceb401..f7b6095 100644 --- a/charts/dataviz-srv/Chart.yaml +++ b/charts/dataviz-srv/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: dataviz-srv description: DataViz Dashboard Service Helm Chart type: application -version: "0.1.1" +version: "0.1.2" appVersion: "0.1.1" icon: https://cdn.jsdelivr.net/npm/@mdi/svg@7.4.47/svg/view-dashboard.svg keywords: From 8635c16a3ba5b050c02523393ec54452eb40bd22 Mon Sep 17 00:00:00 2001 From: Raffaele Vitiello Date: Tue, 17 Jun 2025 13:52:19 +0200 Subject: [PATCH 12/13] fix: helm validation pr --- .github/workflows/release.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 085368a..4dc5e41 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -95,12 +95,6 @@ jobs: cd charts/dataviz-srv helm dependency update - - name: πŸ“¦ Update Helm dependencies - 
if: steps.changes.outputs.charts == 'true' - run: | - cd charts/dataviz-srv - helm dependency update - - name: 🏷️ Auto-version Helm chart if: steps.changes.outputs.charts == 'true' id: version @@ -218,6 +212,12 @@ jobs: with: version: '3.14.0' + - name: πŸ“¦ Update Helm dependencies + if: steps.changes.outputs.charts == 'true' + run: | + cd charts/dataviz-srv + helm dependency update + - name: πŸ” Helm lint if: steps.changes.outputs.charts == 'true' run: | From 16dbbcffdd07b5b285a65ac381731ef54809b4d8 Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Tue, 17 Jun 2025 11:53:36 +0000 Subject: [PATCH 13/13] =?UTF-8?q?=F0=9F=8F=B7=EF=B8=8F=20Bump=20Helm=20cha?= =?UTF-8?q?rt=20version=20to=200.1.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- charts/dataviz-srv/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/dataviz-srv/Chart.yaml b/charts/dataviz-srv/Chart.yaml index f7b6095..27065db 100644 --- a/charts/dataviz-srv/Chart.yaml +++ b/charts/dataviz-srv/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: dataviz-srv description: DataViz Dashboard Service Helm Chart type: application -version: "0.1.2" +version: "0.1.3" appVersion: "0.1.1" icon: https://cdn.jsdelivr.net/npm/@mdi/svg@7.4.47/svg/view-dashboard.svg keywords: