Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

added logger changes in dev #816

Closed
wants to merge 9 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
SECRET_KEY='<-- YOUR SECRET KEY HERE -->'
SECRET_KEY = "<-- YOUR SECRET KEY HERE -->"

DB_NAME='postgres' # Insert your database name here
DB_USER='postgres' # Insert your PostgreSQL username here
DB_PASSWORD='password' #Insert your PostgreSQL password here.
DB_HOST='db'
DB_PORT='5432'
DB_NAME = "postgres" # Insert your database name here
DB_USER = "postgres" # Insert your PostgreSQL username here
DB_PASSWORD = "password" # Insert your PostgreSQL password here.
DB_HOST = "db"
DB_PORT = "5432"

SMTP_USERNAME = ""
SMTP_PASSWORD = ""

API_URL='http://localhost:8000'
API_URL = "http://localhost:8000"

LOGGING='true'
LOG_LEVEL='INFO'
LOGGING = "false"
LOG_LEVEL = "INFO"

ENV='dev'
ENV = "dev"

FRONTEND_URL=''
FRONTEND_URL = ""

INDIC_TRANS_V2_KEY=''
INDIC_TRANS_V2_KEY = ""
1 change: 1 addition & 0 deletions backend/deploy/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -31,3 +31,4 @@ pretty-html-table
sacrebleu
tqdm
jiwer
logstash-formatter
16 changes: 16 additions & 0 deletions backend/shoonya_backend/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,19 @@ def format(self, record):

# Return the new formatted record
return super().format(record)


class FileFormatter(logging.Formatter):
    """Logging formatter for the file handlers.

    Guarantees that the custom ``user_email`` and ``request_path``
    attributes exist on every record (defaulting to ``"dummy"``), so a
    format string that references ``%(user_email)s`` / ``%(request_path)s``
    never raises for records logged without ``extra={...}``.
    """

    # NOTE: the original redundant __init__ that only delegated to
    # logging.Formatter.__init__ has been removed (useless-super-delegation);
    # the inherited constructor is used unchanged.

    def format(self, record):
        """Backfill missing custom attributes, then delegate formatting.

        Args:
            record: The ``logging.LogRecord`` being formatted.

        Returns:
            The formatted log line produced by ``logging.Formatter.format``.
        """
        # Callers that pass extra={"user_email": ..., "request_path": ...}
        # keep their values; everything else gets a safe placeholder.
        if not hasattr(record, "user_email"):
            record.user_email = "dummy"
        if not hasattr(record, "request_path"):
            record.request_path = "dummy"
        return super().format(record)
17 changes: 12 additions & 5 deletions backend/shoonya_backend/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,9 +217,6 @@
# # Get loglevel from env
LOGLEVEL = os.getenv("LOG_LEVEL", "INFO")

# Make a new directory for logs
Path(BASE_DIR / "logs").mkdir(exist_ok=True)

# Define the list of formatters
formatters = {
"console": {
Expand Down Expand Up @@ -248,20 +245,30 @@

# If logging is enabled, add file handlers
if os.getenv("LOGGING", "False").lower() in ("true", "1", "t", "yes", "y"):
# Make a new directory for logs
Path("/logs/logs_web").mkdir(exist_ok=True)

handlers["file"] = {
"level": "WARNING",
"class": "logging.FileHandler",
"filename": os.path.join(BASE_DIR, "logs/default.log"),
"filename": "/logs/logs_web/default.log",
"formatter": "file",
}

handlers["csvfile"] = {
"level": "WARNING",
"class": "logging.FileHandler",
"filename": os.path.join(BASE_DIR, "logs/logs.csv"),
"filename": "/logs/logs_web/logs.csv",
"formatter": "csvfile",
}

# handlers["logstash"] = {
# "level": "WARNING",
# "class": "logstash_formatter.LogstashFormatterTCP",
# "port": 9600,
# "version": 1,
# }

# Setup the Cloud Logging Client
# if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
# client = gc_logging.Client()
Expand Down
8 changes: 8 additions & 0 deletions backend/users/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,13 @@
from .utils import generate_random_string, get_role_name
from rest_framework_simplejwt.tokens import RefreshToken
from dotenv import load_dotenv
import logging

regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
load_dotenv()

logger = logging.getLogger(__name__)


class InviteViewSet(viewsets.ViewSet):
@swagger_auto_schema(request_body=InviteGenerationSerializer)
Expand Down Expand Up @@ -158,6 +161,11 @@ def re_invite(self, request):
The invited user are again invited if they have not accepted the
invitation previously.
"""
extra_data = {
"user_email": "temp_email",
"request_path": "/regenerate",
}
logger.warning("User inside re_invite API", extra=extra_data)
all_emails = request.data.get("emails")
distinct_emails = list(set(all_emails))
existing_emails_set = set(Invite.objects.values_list("user__email", flat=True))
Expand Down
15 changes: 15 additions & 0 deletions docker-compose-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ services:
volumes:
- ./backend/:/usr/src/backend/
- static_volume:/usr/src/backend/static
- logs_vol:/logs
ports:
- 8000:8000
depends_on:
Expand All @@ -59,6 +60,7 @@ services:
command: celery -A shoonya_backend.celery worker -Q default --concurrency=2 --loglevel=info
volumes:
- ./backend/:/usr/src/backend/
- logs_vol:/logs
depends_on:
- redis
- web
Expand All @@ -71,9 +73,20 @@ services:
command: celery -A shoonya_backend.celery worker -Q functions --concurrency=2 --loglevel=info
volumes:
- ./backend/:/usr/src/backend/
- logs_vol:/logs
depends_on:
- redis
- web
logstash:
container_name: logstash
image: docker.elastic.co/logstash/logstash:7.14.0
hostname: shoonya_dev_logger
volumes:
- ./logstash.conf:/usr/share/logstash/pipeline/logstash.conf
- logs_vol:/logs
extra_hosts:
- "elasticsearch:20.42.57.194"
command: logstash -f /usr/share/logstash/pipeline/logstash.conf

# Celery beats - for scheduling daily e-mails - commented out for dev hostings.
# celery-beat:
Expand All @@ -86,6 +99,8 @@ services:
# - web

volumes:
logs_vol:
external: true
nginx_conf:
external: true
letsencrypt_certs:
Expand Down
23 changes: 23 additions & 0 deletions logstash.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Logstash pipeline: tail the Django backend's file log and ship each
# log event to Elasticsearch (index "django_logs_dev").
input {
# Tail the log file written by the Django "file" handler
# (settings.py writes to /logs/logs_web/default.log via the shared
# logs_vol volume mounted at /logs in docker-compose-dev.yml).
file {
path => "/logs/logs_web/default.log"
# Start reading at the end of the file: only ship entries written
# after Logstash starts, skipping historical log lines.
start_position => "end"
# Multi-line records (e.g. tracebacks) do not begin with a timestamp;
# any line NOT starting with an ISO8601 timestamp is folded into the
# previous event, so one log record becomes one Elasticsearch document.
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601}"
negate => true
what => "previous"
}
}
}

filter {
# Additional filtering and parsing as needed
}

output {
# "elasticsearch" resolves via the extra_hosts entry on the logstash
# service in docker-compose-dev.yml.
elasticsearch {
hosts => ["elasticsearch:9200"]
index => "django_logs_dev"
}
}