feat: schedule job in case of failure
Signed-off-by: Ameya Shenoy <shenoy.ameya@gmail.com>
This commit is contained in:
parent b0b5c533af
commit 61245829e1
2 changed files with 52 additions and 48 deletions
@@ -3,15 +3,10 @@
"""Initializer and scheduling done here."""
|
||||
|
||||
# standard imports
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import django_rq
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django_rq.management.commands import rqscheduler
|
||||
|
||||
|
||||
class AppConfig(AppConfig):
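The hunk above shows only the module's imports and the class line, so how the daily 6 pm job referenced later in this commit gets registered is not visible here. As a rough sketch only, assuming the app config wires the schedule up in ready() (the class name, app label, and cron string below are invented; populate_bhav_copy_data and the 'default' queue name come from the second file in this commit):

# Hypothetical sketch, not code from this commit: assumes scheduling
# happens in AppConfig.ready() via rq-scheduler's cron API.
import django_rq

from django.apps import AppConfig


class StocksConfig(AppConfig):
    name = "stocks"  # assumed app label

    def ready(self):
        # Imported lazily so Django app loading is complete before the
        # task module (and its model imports) is touched.
        from .tasks import populate_bhav_copy_data

        scheduler = django_rq.get_scheduler("default")
        # Daily run at 18:00 -- the "6 pm" the retry comment below refers to.
        scheduler.cron("0 18 * * *", func=populate_bhav_copy_data)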
@@ -11,6 +11,7 @@ from io import BytesIO, TextIOWrapper
 from zipfile import ZipFile
 
 # third-party imports
+import django_rq
 import requests
 
 from django_redis import get_redis_connection
@@ -49,49 +50,57 @@ def fetch_bhav_copy_equity_data(curr_date=None):
 @transaction.atomic
 def populate_bhav_copy_data():
     """Populate DB with Bhav Copy data."""
-    pipe = cache.pipeline()
-    data = fetch_bhav_copy_equity_data()
-    del data[0] # delete title row
+    try:
+        pipe = cache.pipeline()
+        data = fetch_bhav_copy_equity_data()
+        del data[0] # delete title row
 
-    cache.delete("stocks")
-    for stock in data:
-        # prevent creation of duplicate entries
-        pipe.rpush("stocks", stock[0])
-        pipe.hset(
-            f"stock:{stock[0]}",
-            mapping={
-                "sc_code": stock[0],
-                "sc_name": stock[1],
-                "sc_group": stock[2],
-                "sc_type": stock[3],
-                "open_price": float(stock[4]),
-                "high_price": float(stock[5]),
-                "low_price": float(stock[6]),
-                "close_price": float(stock[7]),
-                "last_price": float(stock[8]),
-                "prevclose_price": float(stock[9]),
-                "no_trades": int(stock[10]),
-                "no_of_shrs": int(stock[11]),
-                "net_turnov": float(stock[12]),
-                "tdcloindi": stock[13],
-            }
-        )
-        pipe.execute()
-        BhavCopyEquity.objects.get_or_create(
-            sc_code=int(stock[0]),
-            sc_name=stock[1],
-            sc_group=stock[2],
-            sc_type=stock[3],
-            open_price=float(stock[4]),
-            high_price=float(stock[5]),
-            low_price=float(stock[6]),
-            close_price=float(stock[7]),
-            last_price=float(stock[8]),
-            prevclose_price=float(stock[9]),
-            no_trades=int(stock[10]),
-            no_of_shrs=int(stock[11]),
-            net_turnov=float(stock[12]),
-            tdcloindi=stock[13],
-        )
+        cache.delete("stocks")
+        for stock in data:
+            # prevent creation of duplicate entries
+            pipe.rpush("stocks", stock[0])
+            pipe.hset(
+                f"stock:{stock[0]}",
+                mapping={
+                    "sc_code": stock[0],
+                    "sc_name": stock[1],
+                    "sc_group": stock[2],
+                    "sc_type": stock[3],
+                    "open_price": float(stock[4]),
+                    "high_price": float(stock[5]),
+                    "low_price": float(stock[6]),
+                    "close_price": float(stock[7]),
+                    "last_price": float(stock[8]),
+                    "prevclose_price": float(stock[9]),
+                    "no_trades": int(stock[10]),
+                    "no_of_shrs": int(stock[11]),
+                    "net_turnov": float(stock[12]),
+                    "tdcloindi": stock[13],
+                }
+            )
+            pipe.execute()
+            BhavCopyEquity.objects.get_or_create(
+                sc_code=int(stock[0]),
+                sc_name=stock[1],
+                sc_group=stock[2],
+                sc_type=stock[3],
+                open_price=float(stock[4]),
+                high_price=float(stock[5]),
+                low_price=float(stock[6]),
+                close_price=float(stock[7]),
+                last_price=float(stock[8]),
+                prevclose_price=float(stock[9]),
+                no_trades=int(stock[10]),
+                no_of_shrs=int(stock[11]),
+                net_turnov=float(stock[12]),
+                tdcloindi=stock[13],
+            )
+    except:
+        # potential code for alerting if needed goes here
+        # Repeat job after 10 mins if fails at 6 pm
+        scheduler = django_rq.get_scheduler('default')
+        scheduler.schedule(
+            scheduled_time=datetime.datetime.now()+datetime.timedelta(minutes=10),
+            func=populate_bhav_copy_data,
+        )
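Taken on its own, the retry pattern the new except block introduces looks like the sketch below. This is an illustration, not the project's code: fetch_data, flaky_job, and the module layout are invented, while django_rq.get_scheduler and scheduler.schedule are the calls the commit actually uses. A bare except: as in the diff also catches SystemExit and KeyboardInterrupt, so the sketch narrows it to Exception.

# Minimal sketch of "reschedule self on failure" with django-rq and
# rq-scheduler; assumes a 'default' queue in settings.RQ_QUEUES and a
# running rqscheduler process. fetch_data is a hypothetical stand-in.
import datetime

import django_rq


def fetch_data():
    """Stand-in for the real network fetch; may raise on transient errors."""
    raise ConnectionError("simulated failure")


def flaky_job():
    try:
        fetch_data()
    except Exception:
        # Store a one-off job in Redis; the rqscheduler process moves it
        # onto the queue once scheduled_time is reached.
        scheduler = django_rq.get_scheduler("default")
        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow() + datetime.timedelta(minutes=10),
            func=flaky_job,
        )

Two caveats apply to the committed version: rq-scheduler interprets naive datetimes as UTC, so datetime.datetime.now() only yields the intended 10-minute delay on a server running in UTC, and nothing bounds the retries, so a permanently failing job will keep rescheduling itself.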