Skip to content

Commit

Permalink
fix lint
Browse files Browse the repository at this point in the history
  • Loading branch information
tamtam-fitness committed Sep 4, 2023
1 parent b60f55c commit 147e58c
Show file tree
Hide file tree
Showing 14 changed files with 175 additions and 234 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

setup:
docker compose down
docker compose up -d
docker compose up -d --build

enter_container:
docker exec -it python_app bash
Expand Down
7 changes: 1 addition & 6 deletions src/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,4 @@

import logging

# import sentry_sdk
from common import settings
from sentry_sdk.integrations.logging import LoggingIntegration


# sentry_logging = LoggingIntegration(level=logging.INFO, event_level=logging.ERROR)
Expand All @@ -13,4 +8,4 @@
# integrations=[sentry_logging],
# environment=settings.ENV,
# traces_sample_rate=1.0,
# )
# )
4 changes: 1 addition & 3 deletions src/common/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,5 @@

settings = read_yaml(os.path.join(base_dir, f"src/common/yaml_configs/{env}.yaml"))

init_logger(
os.path.join(settings.BASE_DIR, "src/common/logger/logging_config.yaml")
)
init_logger(os.path.join(settings.BASE_DIR, "src/common/logger/logging_config.yaml"))
app_logger = logging.getLogger(__name__)
2 changes: 0 additions & 2 deletions src/common/config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
import os

import yaml
from pydantic import HttpUrl
from pydantic_settings import BaseSettings
Expand Down
2 changes: 1 addition & 1 deletion src/common/yaml_configs/local.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
ENV: "local"
SENTRY_DSN:
OPENAI_API_KEY: your-api-key
OPENAI_API_KEY: "sk-REDACTED"  # SECURITY: a real-looking OpenAI secret key was committed on this line — revoke it immediately and supply it via an environment variable or secrets manager, never in a tracked YAML file
189 changes: 135 additions & 54 deletions src/main.py
Original file line number Diff line number Diff line change
@@ -1,71 +1,88 @@
import urllib.parse
import uuid
from typing import Any

import streamlit as st
import streamlit_mermaid as stmd
import uuid
from src.model.models import EnglishWord
from src.common import app_logger
from pydantic import ValidationError
from service import OpenAISSEClient, WordQuestionBuilder

from service import WordQuestionBuilder, OpenAISSEClient
from src.common import app_logger
from src.model.models import EnglishWord

USER_NAME = "user"
ASSISTANT_NAME = "assistant"


def init_page():
def init_page() -> None:
st.set_page_config(
page_title="Utilizer: Boost your English active vocabulary 📚",
page_icon="📚"
page_title="UtiAIzer: Boost your English active vocabulary 📚", page_icon="📚"
)
st.header("Utilizer: Boost your English active vocabulary")
st.header("UtiAIzer: Boost your English active vocabulary")
st.write(":orange[Generated by chatgpt. No responsibility for any content.]")


def generate_session_id():
def generate_session_id() -> str:
    """Return a fresh, random chat-session identifier.

    The identifier is a version-4 UUID rendered in its canonical
    36-character text form (e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx").
    """
    new_id = uuid.uuid4()
    return str(new_id)


def convart_to_url_part(sentence):
return sentence.replace(" ", "%20")
def convart_to_url_part(sentence: str) -> str:
    """Percent-encode *sentence* for safe embedding in a URL path.

    Uses ``urllib.parse.quote`` defaults, so ``/`` is left unescaped.
    (The "convart" spelling is kept because callers reference this name.)
    """
    encoded = urllib.parse.quote(sentence)
    return encoded


def convart_to_under_score(sentence):
converted = sentence.replace(" ", "_")
def convart_to_under_score(sentence: str) -> str:
    """Replace spaces with underscores and trim at most ONE underscore
    from each end (so " hello world " -> "hello_world").

    Fix: the previous version indexed ``converted[0]`` / ``converted[-1]``
    unguarded, raising IndexError for "" or for inputs that collapse to
    empty after trimming (e.g. " " or "_").  ``removeprefix`` /
    ``removesuffix`` reproduce the same single-character trim while being
    safe on empty strings.

    (The "convart" spelling is kept because callers reference this name.)
    """
    converted = sentence.replace(" ", "_")
    # Trim one leading and one trailing underscore only — deliberately NOT
    # .strip("_"), which would remove runs of underscores from the ends.
    return converted.removeprefix("_").removesuffix("_")

def build_elsa_link(user_msg):
return f"\n - [Go To ELSA:{user_msg}](https://elsaspeak.com/en/learn-english/how-to-pronounce/{convart_to_url_part(user_msg)})"

def build_elsa_link(user_msg: str) -> str:
    """Return a Markdown bullet linking to ELSA's pronunciation page for *user_msg*."""
    slug = convart_to_url_part(user_msg)
    url = f"https://elsaspeak.com/en/learn-english/how-to-pronounce/{slug}"
    return f"\n - [Go To ELSA:{user_msg}]({url})"

def build_youglish_link(user_msg):
return f"\n\n - [Go To Youglish:{user_msg}](https://youglish.com/pronounce/{convart_to_url_part(user_msg)}/english?)"

def build_youglish_link(user_msg: str) -> str:
    """Return a Markdown bullet linking to the Youglish pronunciation search for *user_msg*."""
    slug = convart_to_url_part(user_msg)
    url = f"https://youglish.com/pronounce/{slug}/english?"
    return f"\n\n - [Go To Youglish:{user_msg}]({url})"

def build_mermaid_graph_str(user_msg, result_str):

def build_mermaid_graph_str(user_msg: str, result_str: str) -> str:
app_logger.info(f"result_str: {result_str}")
li = map(convart_to_under_score, result_str.split("|"))
# skip the first element
# skip the first and second
next(li)
next(li)

code = f"""
graph TD
{user_msg} --> {next(li)}
{user_msg} --> {next(li)}
{user_msg} --> {next(li)}
{user_msg} --> {next(li)}
{convart_to_under_score(user_msg)} --> {next(li)}
{convart_to_under_score(user_msg)} --> {next(li)}
{convart_to_under_score(user_msg)} --> {next(li)}
{convart_to_under_score(user_msg)} --> {next(li)}
"""
return code


def ask_llm_sse(key, user_msg, assistant_msg, message_placeholder, is_collocation=False):
def ask_llm_sse(
key: str,
user_msg: str,
assistant_msg: str,
message_placeholder: Any,
is_collocation: bool = False,
) -> tuple[str, str]:
question = WordQuestionBuilder.call(key=key, word=EnglishWord(value=user_msg))
answers = OpenAISSEClient.call(question)

result_str = ""
count = 0
for answer in answers:
if count == 60:
assistant_msg += "\n\n :red[Too many answers.] \n\n"
message_placeholder.write(assistant_msg)
break

if answer.value == '[END]':
if answer.value == "[END]":
message_placeholder.write(assistant_msg)
break

Expand All @@ -74,55 +91,107 @@ def ask_llm_sse(key, user_msg, assistant_msg, message_placeholder, is_collocatio
message_placeholder.write(assistant_msg + "▌")

result_str += answer.value
count += 1

return assistant_msg, result_str


def ask_several_questions_in_order(user_msg, assistant_msg, message_placeholder):
def ask_several_questions_in_order(
user_msg: str, assistant_msg: str, message_placeholder: Any
) -> str:
# Meaning
assistant_msg += f"\n\n #### Meaning \n\n"
assistant_msg, _ = ask_llm_sse(key="meaning", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
assistant_msg += "\n\n #### Meaning \n\n"
assistant_msg, _ = ask_llm_sse(
key="meaning",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)
# Pronunciation
assistant_msg += f"\n\n #### Pronunciation \n\n"
assistant_msg += "\n\n #### Pronunciation \n\n"
assistant_msg += "\n - IPA: "
assistant_msg, _ = ask_llm_sse(key="pronunciation", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
assistant_msg, _ = ask_llm_sse(
key="pronunciation",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)
# pronunciation_tip
assistant_msg += "\n - Pronunciation Tip: "
assistant_msg, _ = ask_llm_sse(key="pronunciation_tip", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
assistant_msg, _ = ask_llm_sse(
key="pronunciation_tip",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)
# st.write(build_youglish_link(user_msg)
assistant_msg += build_youglish_link(user_msg)
assistant_msg += build_elsa_link(user_msg)
message_placeholder.write(assistant_msg)

# Origin
assistant_msg += f"\n\n #### Origin \n\n"
assistant_msg, _ = ask_llm_sse(key="origin", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
# # Origin
# assistant_msg += f"\n\n #### Origin \n\n"
# assistant_msg, _ = ask_llm_sse(key="origin", user_msg=user_msg, assistant_msg=assistant_msg,
# message_placeholder=message_placeholder)
# Synonym
assistant_msg += f"\n\n #### Synonym \n\n"
assistant_msg, _ = ask_llm_sse(key="synonym", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
assistant_msg += "\n\n #### Synonym \n\n"
assistant_msg, _ = ask_llm_sse(
key="synonym",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)

# Synonym
assistant_msg += "\n\n #### Antonym \n\n"
assistant_msg, _ = ask_llm_sse(
key="antonym",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)

# Example Sentence
assistant_msg += f"\n\n #### Example Sentence \n\n"
assistant_msg, result_str = ask_llm_sse(key="example_sentence", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder)
assistant_msg += build_elsa_link(result_str)
assistant_msg += "\n\n #### Example Sentence \n\n"
assistant_msg, result_str = ask_llm_sse(
key="example_sentence",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
)

# if AI find the example sentence, then build the url
if "I'm sorry" not in result_str and "Sorry" not in result_str:
assistant_msg += build_elsa_link(result_str)

# # making_sentence_tips
# assistant_msg += "\n\n #### Making Sentence Tips \n\n"
# assistant_msg, _ = ask_llm_sse(
# key="making_sentence_tips",
# user_msg=user_msg,
# assistant_msg=assistant_msg,
# message_placeholder=message_placeholder,
# )

# Collocation
assistant_msg += f"\n\n #### Collocation MindMap \n\n"
assistant_msg, result_str = ask_llm_sse(key="collocation", user_msg=user_msg, assistant_msg=assistant_msg,
message_placeholder=message_placeholder, is_collocation=True)
assistant_msg += "\n\n #### Collocation MindMap \n\n"
assistant_msg, result_str = ask_llm_sse(
key="collocation",
user_msg=user_msg,
assistant_msg=assistant_msg,
message_placeholder=message_placeholder,
is_collocation=True,
)
app_logger.info(f"result_str: {result_str}")
code = build_mermaid_graph_str(user_msg, result_str)
assistant_msg += code
app_logger.info(f"code: {code}")
stmd.st_mermaid(code)

return assistant_msg

def ask():

def ask() -> None:
current_session = st.session_state.current_session
chat_log = st.session_state.chat_sessions[current_session]
# 現在のセッションのチャット履歴を表示
Expand All @@ -139,13 +208,23 @@ def ask():
with st.chat_message(ASSISTANT_NAME):
message_placeholder = st.empty()
assistant_msg = ""
assistant_msg = ask_several_questions_in_order(user_msg, assistant_msg, message_placeholder)
try:
assistant_msg = ask_several_questions_in_order(
user_msg, assistant_msg, message_placeholder
)
except ValidationError:
st.error(
"Please enter english word or phrase that is 30 characters or less including spaces."
)
except Exception as e:
app_logger.exception(e)
# st.error('Sorry, something went wrong.')
# セッションにチャットログを追加
chat_log.append({"name": USER_NAME, "msg": user_msg})
chat_log.append({"name": ASSISTANT_NAME, "msg": assistant_msg})


def main():
def main() -> None:
init_page()

# セッション情報の初期化
Expand All @@ -158,7 +237,9 @@ def main():

# Sidebarの実装
session_list = list(st.session_state.chat_sessions.keys())
selected_session = st.sidebar.selectbox("Select Chat Session", session_list, index=len(session_list) - 1)
selected_session = st.sidebar.selectbox(
"Select Chat Session", session_list, index=len(session_list) - 1
)
st.session_state.current_session = selected_session

if st.sidebar.button("New Chat"):
Expand All @@ -169,5 +250,5 @@ def main():
ask()


if __name__ == '__main__':
if __name__ == "__main__":
main()
12 changes: 8 additions & 4 deletions src/model/models.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,19 @@
from pydantic import BaseModel, Field, validator


class NotEnglishError(ValueError):
pass


class EnglishWord(BaseModel):
value: str = Field(max_length=20)
value: str = Field(max_length=30)

@validator("value")
def is_english(cls, v):
def is_english(cls, v: str) -> str:
try:
v.encode('ascii')
v.encode("ascii")
except UnicodeEncodeError:
raise ValueError("English only")
raise NotEnglishError("English only") from None
return v


Expand Down
4 changes: 2 additions & 2 deletions src/service/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from .question_builder import WordQuestionBuilder
from .openai_sse_client import OpenAISSEClient
from .question_builder import WordQuestionBuilder

__all__ = [
"WordQuestionBuilder",
"OpenAISSEClient",
]
]
Loading

0 comments on commit 147e58c

Please sign in to comment.