fix: optimize postgres endpoint to offload task

offload db population task to rq

Signed-off-by: Ameya Shenoy <shenoy.ameya@gmail.com>
This commit is contained in:
Ameya Shenoy 2021-02-13 04:59:48 +05:30
parent c3ee44e7e0
commit 316ee5df41
Signed by: codingcoffee
GPG key ID: F7D58AAC5DACF8D3
3 changed files with 55 additions and 13 deletions

View file

@@ -35,8 +35,9 @@ def fetch_bhav_copy_equity_data(date=None):
} }
if date is None: if date is None:
date = datetime.datetime.now() date = datetime.datetime.now()
zipurl = f'https://www.bseindia.com/download/BhavCopy/Equity/EQ{date.strftime("%d%m%y")}_CSV.ZIP' datestr = date.strftime("%d%m%y")
logger.info(f'Fetching data from {zipurl}') zipurl = f'https://www.bseindia.com/download/BhavCopy/Equity/EQ{datestr}_CSV.ZIP'
logger.info('Fetching data from %s', zipurl)
resp = requests.get(zipurl, headers=headers) resp = requests.get(zipurl, headers=headers)
if resp.ok: if resp.ok:
@@ -52,7 +53,7 @@ def populate_bhav_copy_data(date=None):
"""Populate DB with Bhav Copy data.""" """Populate DB with Bhav Copy data."""
try: try:
data = fetch_bhav_copy_equity_data(date=date) data = fetch_bhav_copy_equity_data(date=date)
except: except Exception as err:
# Potentially add code for alerting if needed # Potentially add code for alerting if needed
# Repeat job after 10 mins if job fails # Repeat job after 10 mins if job fails
scheduler = django_rq.get_scheduler('default') scheduler = django_rq.get_scheduler('default')
@@ -60,7 +61,7 @@ def populate_bhav_copy_data(date=None):
scheduled_time=datetime.datetime.now()+datetime.timedelta(minutes=10), scheduled_time=datetime.datetime.now()+datetime.timedelta(minutes=10),
func=populate_bhav_copy_data, func=populate_bhav_copy_data,
) )
raise ValueError(f"Error fetching data from BSE for {date}") raise ValueError(f"Error fetching data from BSE for {date}\nDetails: {err}")
else: else:
del data[0] # delete title row del data[0] # delete title row
populate_bhav_copy_data_into_redis(data) populate_bhav_copy_data_into_redis(data)
@@ -68,7 +69,7 @@ def populate_bhav_copy_data(date=None):
def populate_bhav_copy_data_into_redis(data): def populate_bhav_copy_data_into_redis(data):
logger.info(f'Populating data into redis') logger.info('Populating data into redis')
pipe = cache.pipeline() pipe = cache.pipeline()
cache.delete("stocks") cache.delete("stocks")
for stock in data: for stock in data:
@@ -98,7 +99,7 @@ def populate_bhav_copy_data_into_redis(data):
@transaction.atomic @transaction.atomic
def populate_bhav_copy_data_into_postgres(data, date=None): def populate_bhav_copy_data_into_postgres(data, date=None):
logger.info(f'Populating data into postgres for {date}') logger.info('Populating data into postgres for %s', date)
if date is None: if date is None:
date = datetime.datetime.now().date() date = datetime.datetime.now().date()
for stock in data: for stock in data:
@@ -120,3 +121,25 @@ def populate_bhav_copy_data_into_postgres(data, date=None):
tdcloindi=stock[13], tdcloindi=stock[13],
) )
def stocks_csv_to_json(data):
    """Convert Bhav Copy equity CSV rows into a list of stock dicts.

    Args:
        data: iterable of CSV rows (each a sequence of at least 14 fields),
            with the title row already removed by the caller.

    Returns:
        list[dict]: one dict per row, with price/turnover fields cast to
        ``float`` and trade/share counts cast to ``int``.

    Raises:
        ValueError: if a numeric field cannot be parsed.
    """
    # Comprehension instead of a manual append loop (ruff PERF401);
    # field order follows the BSE Bhav Copy CSV column layout.
    return [
        {
            "sc_code": stock[0],
            "sc_name": stock[1],
            "sc_group": stock[2],
            "sc_type": stock[3],
            "open_price": float(stock[4]),
            "high_price": float(stock[5]),
            "low_price": float(stock[6]),
            "close_price": float(stock[7]),
            "last_price": float(stock[8]),
            "prevclose_price": float(stock[9]),
            "no_trades": int(stock[10]),
            "no_of_shrs": int(stock[11]),
            "net_turnov": float(stock[12]),
            "tdcloindi": stock[13],
        }
        for stock in data
    ]

View file

@@ -8,6 +8,8 @@ import datetime
import logging import logging
# third-party imports # third-party imports
import django_rq
from rest_framework import generics from rest_framework import generics
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.decorators import api_view from rest_framework.decorators import api_view
@@ -16,7 +18,12 @@ from django_redis import get_redis_connection
# app imports # app imports
from app.models import BhavCopyEquity from app.models import BhavCopyEquity
from app.serializers import BhavCopyEquitySerializer from app.serializers import BhavCopyEquitySerializer
from app.utils import populate_bhav_copy_data from app.utils import (
populate_bhav_copy_data,
fetch_bhav_copy_equity_data,
stocks_csv_to_json,
populate_bhav_copy_data_into_postgres
)
cache = get_redis_connection("default") cache = get_redis_connection("default")
@@ -42,11 +49,23 @@ def bhavCopyEquityList(request):
# Fetch data if not present # Fetch data if not present
if len(serializer.data) == 0: if len(serializer.data) == 0:
logger.info(f'Data not available in DB') logger.info('Data not available in DB, trying to fetch from BSE')
populate_bhav_copy_data(date=req_date) try:
queryset = BhavCopyEquity.objects.all().filter(date=req_date) data = fetch_bhav_copy_equity_data(date=req_date)
serializer = BhavCopyEquitySerializer(queryset, many=True) except:
return Response({
'data': [],
'message': "Unable to fetch data from BSE"
})
del data[0] # delete title row
logger.info('Enqueue background task to populate Postgres DB')
django_rq.enqueue(populate_bhav_copy_data_into_postgres, args=(data, req_date))
logger.info('Return quick response')
stocks = stocks_csv_to_json(data)
return Response({
"data": stocks,
"message": 'Data was directly sourced from BSE!'
})
return Response({ return Response({
"data": serializer.data, "data": serializer.data,
"message": ret_message "message": ret_message

View file

@@ -214,7 +214,7 @@
{text: 'High', value: 'high_price'}, {text: 'High', value: 'high_price'},
{text: 'Low', value: 'low_price'}, {text: 'Low', value: 'low_price'},
{text: 'Close', value: 'close_price'}, {text: 'Close', value: 'close_price'},
{ text: 'Diff (%)', value: 'diff_percent' }, {text: 'Diff (%)', value: 'diff_percent' },
{text: 'Last', value: 'last_price'}, {text: 'Last', value: 'last_price'},
{text: 'Previous Close', value: 'prevclose_price'}, {text: 'Previous Close', value: 'prevclose_price'},
{text: 'No. of Trades', value: 'no_trades'}, {text: 'No. of Trades', value: 'no_trades'},