Initial backend commit

This commit is contained in:
thigazhezhilan 2026-02-01 13:06:44 +00:00
commit 08c6741aae
48 changed files with 6407 additions and 0 deletions

BIN
Backend.zip Normal file

Binary file not shown.

1
README.md Normal file
View File

@ -0,0 +1 @@
Control plane API skeleton.

117
alembic.ini Normal file
View File

@ -0,0 +1,117 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url =
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

0
app/__init__.py Normal file
View File

71
app/admin_auth.py Normal file
View File

@ -0,0 +1,71 @@
from fastapi import HTTPException, Request
from app.services.auth_service import get_user_for_session
from app.services.db import db_connection
SESSION_COOKIE_NAME = "session_id"
def _resolve_role(row) -> str:
role = row[2]
if role:
return role
if row[4]:
return "SUPER_ADMIN"
if row[3]:
return "ADMIN"
return "USER"
def require_admin(request: Request):
    """FastAPI dependency: return the session user if they hold ADMIN or SUPER_ADMIN.

    Raises:
        HTTPException: 401 without a valid session, 403 without admin rights.
    """
    session_id = request.cookies.get(SESSION_COOKIE_NAME)
    if not session_id:
        raise HTTPException(status_code=401, detail="Not authenticated")
    user = get_user_for_session(session_id)
    if not user:
        raise HTTPException(status_code=401, detail="Not authenticated")
    with db_connection() as conn, conn.cursor() as cur:
        cur.execute(
            "SELECT id, username, role, is_admin, is_super_admin FROM app_user WHERE id = %s",
            (user["id"],),
        )
        row = cur.fetchone()
        if not row:
            raise HTTPException(status_code=403, detail="Admin access required")
        role = _resolve_role(row)
        if role in ("ADMIN", "SUPER_ADMIN"):
            return {"id": row[0], "username": row[1], "role": role}
        raise HTTPException(status_code=403, detail="Admin access required")
def require_super_admin(request: Request):
    """FastAPI dependency: return the session user only if they are SUPER_ADMIN.

    Raises:
        HTTPException: 401 without a valid session, 403 without super-admin rights.
    """
    session_id = request.cookies.get(SESSION_COOKIE_NAME)
    if not session_id:
        raise HTTPException(status_code=401, detail="Not authenticated")
    user = get_user_for_session(session_id)
    if not user:
        raise HTTPException(status_code=401, detail="Not authenticated")
    with db_connection() as conn, conn.cursor() as cur:
        cur.execute(
            "SELECT id, username, role, is_admin, is_super_admin FROM app_user WHERE id = %s",
            (user["id"],),
        )
        row = cur.fetchone()
        if not row:
            raise HTTPException(status_code=403, detail="Super admin access required")
        role = _resolve_role(row)
        if role == "SUPER_ADMIN":
            return {"id": row[0], "username": row[1], "role": role}
        raise HTTPException(status_code=403, detail="Super admin access required")

163
app/admin_models.py Normal file
View File

@ -0,0 +1,163 @@
from datetime import datetime
from typing import Any, Optional
from pydantic import BaseModel
class TopError(BaseModel):
    """One recent error row surfaced on the admin overview."""

    ts: Optional[datetime]
    event: str
    message: Optional[str]
    source: str  # 'engine_event' or 'strategy_log'
    user_id: Optional[str]
    run_id: Optional[str]
class OverviewResponse(BaseModel):
    """Platform-wide counters plus recent errors for the admin dashboard."""

    total_users: int
    users_logged_in_last_24h: int
    total_runs: int
    running_runs: int
    stopped_runs: int
    error_runs: int
    live_runs_count: int
    paper_runs_count: int
    orders_last_24h: int
    trades_last_24h: int
    sip_executed_last_24h: int
    top_errors: list[TopError]
class UserSummary(BaseModel):
    """One row of the admin user list."""

    user_id: str
    username: str
    role: str
    is_admin: bool
    created_at: Optional[datetime]
    last_login_at: Optional[datetime]
    active_run_id: Optional[str]
    active_run_status: Optional[str]
    runs_count: int
    broker_connected: bool
class UsersResponse(BaseModel):
    """Paginated page of user summaries."""

    page: int
    page_size: int
    total: int
    users: list[UserSummary]
class RunSummary(BaseModel):
    """One strategy run with headline metrics."""

    run_id: str
    user_id: str
    status: str
    created_at: Optional[datetime]
    started_at: Optional[datetime]
    stopped_at: Optional[datetime]
    strategy: Optional[str]
    mode: Optional[str]
    broker: Optional[str]
    sip_amount: Optional[float]
    sip_frequency_value: Optional[int]
    sip_frequency_unit: Optional[str]
    last_event_time: Optional[datetime]
    last_sip_time: Optional[datetime]
    next_sip_time: Optional[datetime]
    order_count: int
    trade_count: int
    equity_latest: Optional[float]
    pnl_latest: Optional[float]
class RunsResponse(BaseModel):
    """Paginated page of run summaries."""

    page: int
    page_size: int
    total: int
    runs: list[RunSummary]
class EventItem(BaseModel):
    """A single entry in a user's merged log/engine event feed."""

    ts: Optional[datetime]
    source: str  # 'strategy_log' or 'engine_event'
    event: str
    message: Optional[str]
    level: Optional[str]  # only strategy_log rows carry a level
    run_id: Optional[str]
    meta: Optional[dict[str, Any]]
class CapitalSummary(BaseModel):
    """Latest capital figures for a user's active run; all None when unavailable."""

    cash: Optional[float]
    invested: Optional[float]
    mtm: Optional[float]
    equity: Optional[float]
    pnl: Optional[float]
class UserDetailResponse(BaseModel):
    """Full admin drill-down for one user."""

    user: UserSummary
    runs: list[RunSummary]
    current_config: Optional[dict[str, Any]]
    events: list[EventItem]
    capital_summary: CapitalSummary
class EngineStatusResponse(BaseModel):
    """Engine status row attached to a run detail."""

    status: Optional[str]
    last_updated: Optional[datetime]
class RunDetailResponse(BaseModel):
    """Full admin drill-down for one run."""

    run: RunSummary
    config: Optional[dict[str, Any]]
    engine_status: Optional[EngineStatusResponse]
    state_snapshot: Optional[dict[str, Any]]
    ledger_events: list[dict[str, Any]]
    orders: list[dict[str, Any]]
    trades: list[dict[str, Any]]
    invariants: dict[str, Any]
class InvariantsResponse(BaseModel):
    """Counts of data-integrity violations across the platform."""

    running_runs_per_user_violations: int
    orphan_rows: int
    duplicate_logical_time: int
    negative_cash: int
    invalid_qty: int
    stale_running_runs: int
class SupportTicketSummary(BaseModel):
    """One support ticket as listed in the admin UI."""

    ticket_id: str
    name: str
    email: str
    subject: str
    message: str
    status: str
    created_at: Optional[datetime]
    updated_at: Optional[datetime]
class SupportTicketsResponse(BaseModel):
    """Paginated page of support tickets."""

    page: int
    page_size: int
    total: int
    tickets: list[SupportTicketSummary]
class DeleteSupportTicketResponse(BaseModel):
    """Receipt for a support-ticket deletion."""

    ticket_id: str
    deleted: bool
class DeleteUserResponse(BaseModel):
    """Receipt for a hard user deletion: per-table row counts and the audit row id."""

    user_id: str
    deleted: dict[str, int]
    audit_id: int
class HardResetResponse(BaseModel):
    """Receipt for a hard reset of a user's trading data."""

    user_id: str
    deleted: dict[str, int]
    audit_id: int

109
app/admin_role_service.py Normal file
View File

@ -0,0 +1,109 @@
import os
from app.services.auth_service import create_user, get_user_by_username
from app.services.db import db_connection
VALID_ROLES = {"USER", "ADMIN", "SUPER_ADMIN"}
def _sync_legacy_flags(cur, user_id: str, role: str):
cur.execute(
"""
UPDATE app_user
SET is_admin = %s, is_super_admin = %s
WHERE id = %s
""",
(role in ("ADMIN", "SUPER_ADMIN"), role == "SUPER_ADMIN", user_id),
)
def set_user_role(actor_id: str, target_id: str, new_role: str):
    """Change a user's role inside a single transaction, with audit logging.

    Returns:
        None when the target user does not exist;
        {"error": "invalid_role"} or {"error": "cannot_demote_self"} for
        rejected requests; otherwise {"user_id", "old_role", "new_role"}.
    """
    if new_role not in VALID_ROLES:
        return {"error": "invalid_role"}
    with db_connection() as conn:
        # `with conn` wraps the read-check-update-audit sequence in one
        # transaction: commit on normal exit, rollback on exception.
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT role FROM app_user WHERE id = %s",
                    (target_id,),
                )
                row = cur.fetchone()
                if not row:
                    return None
                old_role = row[0]
                # A SUPER_ADMIN may not strip their own super-admin role.
                if actor_id == target_id and old_role == "SUPER_ADMIN" and new_role != "SUPER_ADMIN":
                    return {"error": "cannot_demote_self"}
                if old_role == new_role:
                    # No-op change: skip both the update and the audit row.
                    # NOTE(review): legacy flags are not re-synced on this
                    # path — confirm they cannot drift from the role column.
                    return {
                        "user_id": target_id,
                        "old_role": old_role,
                        "new_role": new_role,
                    }
                cur.execute(
                    """
                    UPDATE app_user
                    SET role = %s
                    WHERE id = %s
                    """,
                    (new_role, target_id),
                )
                _sync_legacy_flags(cur, target_id, new_role)
                # Record who changed whom, and from/to which role.
                cur.execute(
                    """
                    INSERT INTO admin_role_audit
                    (actor_user_id, target_user_id, old_role, new_role)
                    VALUES (%s, %s, %s, %s)
                    """,
                    (actor_id, target_id, old_role, new_role),
                )
                return {
                    "user_id": target_id,
                    "old_role": old_role,
                    "new_role": new_role,
                }
def _promote_to_super_admin(user_id: str) -> None:
    """Set role='SUPER_ADMIN' for user_id and sync legacy flags, in one transaction."""
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    UPDATE app_user
                    SET role = 'SUPER_ADMIN'
                    WHERE id = %s
                    """,
                    (user_id,),
                )
                _sync_legacy_flags(cur, user_id, "SUPER_ADMIN")


def bootstrap_super_admin():
    """Ensure the SUPER_ADMIN_EMAIL account exists and holds the SUPER_ADMIN role.

    No-op when SUPER_ADMIN_EMAIL is unset. If the account does not exist it is
    created with SUPER_ADMIN_PASSWORD and then promoted.

    Raises:
        RuntimeError: when the account must be created but no password is set.
    """
    email = (os.getenv("SUPER_ADMIN_EMAIL") or "").strip()
    if not email:
        return
    existing = get_user_by_username(email)
    if existing:
        _promote_to_super_admin(existing["id"])
        return
    password = (os.getenv("SUPER_ADMIN_PASSWORD") or "").strip()
    if not password:
        raise RuntimeError("SUPER_ADMIN_PASSWORD must be set to bootstrap SUPER_ADMIN")
    user = create_user(email, password)
    if not user:
        # create_user signalled failure (e.g. lost a race with another signup).
        return
    _promote_to_super_admin(user["id"])

151
app/admin_router.py Normal file
View File

@ -0,0 +1,151 @@
from fastapi import APIRouter, Depends, HTTPException, Query
from app.admin_auth import require_admin, require_super_admin
from app.admin_models import (
DeleteUserResponse,
HardResetResponse,
InvariantsResponse,
SupportTicketsResponse,
DeleteSupportTicketResponse,
OverviewResponse,
RunsResponse,
RunDetailResponse,
UsersResponse,
UserDetailResponse,
)
from app.admin_service import (
delete_user_hard,
hard_reset_user_data,
get_invariants,
get_support_tickets,
delete_support_ticket,
get_overview,
get_run_detail,
get_runs,
get_user_detail,
get_users,
)
from app.admin_role_service import set_user_role
# Every route under /api/admin requires an ADMIN or SUPER_ADMIN session via the
# router-level dependency; destructive routes additionally require_super_admin.
router = APIRouter(prefix="/api/admin", dependencies=[Depends(require_admin)])
@router.get("/overview", response_model=OverviewResponse)
def admin_overview():
    """Return platform-wide counters and recent errors for the admin dashboard."""
    return get_overview()
@router.get("/users", response_model=UsersResponse)
def admin_users(
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    query: str | None = None,
):
    """List users, paginated; `query` filters by username substring or exact user id."""
    return get_users(page, page_size, query)
@router.get("/users/{user_id}", response_model=UserDetailResponse)
def admin_user_detail(user_id: str):
    """Return one user's runs, config, recent events and capital summary (404 if unknown)."""
    detail = get_user_detail(user_id)
    if not detail:
        raise HTTPException(status_code=404, detail="User not found")
    return detail
@router.delete("/users/{user_id}", response_model=DeleteUserResponse)
def admin_delete_user(
    user_id: str,
    hard: bool = Query(False),
    admin_user: dict = Depends(require_super_admin),
):
    """Permanently delete a user and their data (SUPER_ADMIN only).

    The caller must pass hard=true explicitly to confirm the destructive intent.
    """
    if not hard:
        raise HTTPException(status_code=400, detail="Hard delete requires hard=true")
    result = delete_user_hard(user_id, admin_user)
    if result is None:
        raise HTTPException(status_code=404, detail="User not found")
    return result
@router.post("/users/{user_id}/hard-reset", response_model=HardResetResponse)
def admin_hard_reset_user(
    user_id: str,
    admin_user: dict = Depends(require_super_admin),
):
    """Wipe a user's trading data while keeping the account (SUPER_ADMIN only)."""
    result = hard_reset_user_data(user_id, admin_user)
    if result is None:
        raise HTTPException(status_code=404, detail="User not found")
    return result
@router.post("/users/{user_id}/make-admin")
def admin_make_admin(user_id: str, admin_user: dict = Depends(require_super_admin)):
    """Grant the ADMIN role to a user (SUPER_ADMIN only)."""
    result = set_user_role(admin_user["id"], user_id, "ADMIN")
    if result is None:
        raise HTTPException(status_code=404, detail="User not found")
    # set_user_role reports at most one error code; map it to a 400 message.
    error_details = {
        "cannot_demote_self": "Cannot demote self",
        "invalid_role": "Invalid role",
    }
    detail = error_details.get(result.get("error"))
    if detail is not None:
        raise HTTPException(status_code=400, detail=detail)
    return result
@router.post("/users/{user_id}/revoke-admin")
def admin_revoke_admin(user_id: str, admin_user: dict = Depends(require_super_admin)):
    """Demote a user back to the USER role (SUPER_ADMIN only)."""
    result = set_user_role(admin_user["id"], user_id, "USER")
    if result is None:
        raise HTTPException(status_code=404, detail="User not found")
    # set_user_role reports at most one error code; map it to a 400 message.
    error_details = {
        "cannot_demote_self": "Cannot demote self",
        "invalid_role": "Invalid role",
    }
    detail = error_details.get(result.get("error"))
    if detail is not None:
        raise HTTPException(status_code=400, detail=detail)
    return result
@router.post("/users/{user_id}/make-super-admin")
def admin_make_super_admin(user_id: str, admin_user: dict = Depends(require_super_admin)):
    """Grant the SUPER_ADMIN role to a user (SUPER_ADMIN only)."""
    result = set_user_role(admin_user["id"], user_id, "SUPER_ADMIN")
    if result is None:
        raise HTTPException(status_code=404, detail="User not found")
    if result.get("error") == "invalid_role":
        raise HTTPException(status_code=400, detail="Invalid role")
    return result
@router.get("/runs", response_model=RunsResponse)
def admin_runs(
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    status: str | None = None,
    mode: str | None = None,
    user_id: str | None = None,
):
    """List runs, paginated, optionally filtered by status, mode and/or user."""
    return get_runs(page, page_size, status, mode, user_id)
@router.get("/runs/{run_id}", response_model=RunDetailResponse)
def admin_run_detail(run_id: str):
    """Return one run's config, engine state, ledgers and invariants (404 if unknown)."""
    detail = get_run_detail(run_id)
    if not detail:
        raise HTTPException(status_code=404, detail="Run not found")
    return detail
@router.get("/health/invariants", response_model=InvariantsResponse)
def admin_invariants():
    """Return platform-wide data-integrity violation counts."""
    return get_invariants()
@router.get("/support-tickets", response_model=SupportTicketsResponse)
def admin_support_tickets(
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
):
    """List support tickets, paginated, newest first."""
    return get_support_tickets(page, page_size)
@router.delete("/support-tickets/{ticket_id}", response_model=DeleteSupportTicketResponse)
def admin_delete_support_ticket(ticket_id: str):
    """Delete a support ticket by id (404 if it does not exist)."""
    result = delete_support_ticket(ticket_id)
    if not result:
        raise HTTPException(status_code=404, detail="Ticket not found")
    return result

762
app/admin_service.py Normal file
View File

@ -0,0 +1,762 @@
from datetime import datetime, timedelta, timezone
import hashlib
import os
from psycopg2.extras import Json
from psycopg2.extras import RealDictCursor
from app.services.db import db_connection
from app.services.run_service import get_running_run_id
from indian_paper_trading_strategy.engine.runner import stop_engine
def _paginate(page: int, page_size: int):
page = max(page, 1)
page_size = max(min(page_size, 200), 1)
offset = (page - 1) * page_size
return page, page_size, offset
def get_overview():
    """Aggregate platform-wide stats for the admin dashboard.

    Returns user/run counters, 24-hour activity counts, and the 10 most recent
    error events merged from engine_event and strategy_log.
    """
    now = datetime.now(timezone.utc)
    since = now - timedelta(hours=24)
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT COUNT(*) FROM app_user")
            total_users = cur.fetchone()[0]
            # Distinct users with session activity in the last 24 hours.
            cur.execute(
                """
                SELECT COUNT(DISTINCT user_id)
                FROM app_session
                WHERE COALESCE(last_seen_at, created_at) >= %s
                """,
                (since,),
            )
            users_logged_in_last_24h = cur.fetchone()[0]
            # Single pass over strategy_run for all status/mode breakdowns.
            cur.execute(
                """
                SELECT
                    COUNT(*) AS total_runs,
                    COUNT(*) FILTER (WHERE status = 'RUNNING') AS running_runs,
                    COUNT(*) FILTER (WHERE status = 'STOPPED') AS stopped_runs,
                    COUNT(*) FILTER (WHERE status = 'ERROR') AS error_runs,
                    COUNT(*) FILTER (WHERE mode = 'LIVE') AS live_runs_count,
                    COUNT(*) FILTER (WHERE mode = 'PAPER') AS paper_runs_count
                FROM strategy_run
                """
            )
            run_row = cur.fetchone()
            cur.execute(
                """
                SELECT COUNT(*) FROM paper_order WHERE "timestamp" >= %s
                """,
                (since,),
            )
            orders_last_24h = cur.fetchone()[0]
            cur.execute(
                """
                SELECT COUNT(*) FROM paper_trade WHERE "timestamp" >= %s
                """,
                (since,),
            )
            trades_last_24h = cur.fetchone()[0]
            cur.execute(
                """
                SELECT COUNT(*)
                FROM event_ledger
                WHERE event = 'SIP_EXECUTED' AND "timestamp" >= %s
                """,
                (since,),
            )
            sip_executed_last_24h = cur.fetchone()[0]
            # Most recent errors from both sources, tagged with their origin.
            cur.execute(
                """
                SELECT ts, event, message, source, user_id, run_id
                FROM (
                    SELECT ts, event, message, 'engine_event' AS source, user_id, run_id
                    FROM engine_event
                    WHERE event ILIKE '%ERROR%'
                    UNION ALL
                    SELECT ts, event, message, 'strategy_log' AS source, user_id, run_id
                    FROM strategy_log
                    WHERE level = 'ERROR'
                ) t
                ORDER BY ts DESC NULLS LAST
                LIMIT 10
                """
            )
            # Plain (tuple) cursor here, so positional indexing is correct.
            top_errors = [
                {
                    "ts": row[0],
                    "event": row[1],
                    "message": row[2],
                    "source": row[3],
                    "user_id": row[4],
                    "run_id": row[5],
                }
                for row in cur.fetchall()
            ]
    return {
        "total_users": total_users,
        "users_logged_in_last_24h": users_logged_in_last_24h,
        "total_runs": run_row[0],
        "running_runs": run_row[1],
        "stopped_runs": run_row[2],
        "error_runs": run_row[3],
        "live_runs_count": run_row[4],
        "paper_runs_count": run_row[5],
        "orders_last_24h": orders_last_24h,
        "trades_last_24h": trades_last_24h,
        "sip_executed_last_24h": sip_executed_last_24h,
        "top_errors": top_errors,
    }
def get_users(page: int, page_size: int, query: str | None):
    """Page through admin_user_metrics, optionally filtered by `query`.

    `query` matches username as a case-insensitive substring (ILIKE) or
    user_id exactly.
    NOTE(review): if user_id is a UUID column, comparing it to arbitrary
    text can raise in Postgres — confirm the column type or add a cast.
    """
    page, page_size, offset = _paginate(page, page_size)
    params = []
    where = ""
    if query:
        where = "WHERE username ILIKE %s OR user_id = %s"
        params = [f"%{query}%", query]
    with db_connection() as conn:
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            # Postgres names the COUNT(*) column "count".
            cur.execute(f"SELECT COUNT(*) FROM admin_user_metrics {where}", params)
            total = cur.fetchone()["count"]
            cur.execute(
                f"""
                SELECT *
                FROM admin_user_metrics
                {where}
                ORDER BY created_at DESC NULLS LAST
                LIMIT %s OFFSET %s
                """,
                (*params, page_size, offset),
            )
            rows = cur.fetchall()
    return {
        "page": page,
        "page_size": page_size,
        "total": total,
        "users": rows,
    }
def _get_active_run_id(cur, user_id: str):
cur.execute(
"""
SELECT run_id
FROM strategy_run
WHERE user_id = %s AND status = 'RUNNING'
ORDER BY created_at DESC
LIMIT 1
""",
(user_id,),
)
row = cur.fetchone()
if row:
return row[0]
cur.execute(
"""
SELECT run_id
FROM strategy_run
WHERE user_id = %s
ORDER BY created_at DESC
LIMIT 1
""",
(user_id,),
)
row = cur.fetchone()
return row[0] if row else None
def get_user_detail(user_id: str):
    """Return a single user's admin detail payload, or None if unknown.

    Bundles the user's metrics row, their 20 most recent runs, the active
    run's config, the 50 most recent log/engine events, and a capital summary.
    """
    with db_connection() as conn:
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            cur.execute("SELECT * FROM admin_user_metrics WHERE user_id = %s", (user_id,))
            user = cur.fetchone()
            if not user:
                return None
            cur.execute(
                """
                SELECT * FROM admin_run_metrics
                WHERE user_id = %s
                ORDER BY created_at DESC NULLS LAST
                LIMIT 20
                """,
                (user_id,),
            )
            runs = cur.fetchall()
            active_run_id = _get_active_run_id(cur, user_id)
            config = None
            if active_run_id:
                cur.execute(
                    """
                    SELECT strategy, sip_amount, sip_frequency_value, sip_frequency_unit,
                           mode, broker, active, frequency, frequency_days, unit, next_run
                    FROM strategy_config
                    WHERE user_id = %s AND run_id = %s
                    LIMIT 1
                    """,
                    (user_id, active_run_id),
                )
                cfg_row = cur.fetchone()
                if cfg_row:
                    config = dict(cfg_row)
            cur.execute(
                """
                SELECT ts, event, message, level, run_id, meta, 'strategy_log' AS source
                FROM strategy_log
                WHERE user_id = %s
                UNION ALL
                SELECT ts, event, message, NULL AS level, run_id, meta, 'engine_event' AS source
                FROM engine_event
                WHERE user_id = %s
                ORDER BY ts DESC NULLS LAST
                LIMIT 50
                """,
                (user_id, user_id),
            )
            # BUG FIX: this cursor yields RealDictCursor rows (dicts keyed by
            # column name); the previous positional indexing (row[0], row[1],
            # ...) raised KeyError. Access by key instead.
            events = [
                {
                    "ts": row["ts"],
                    "event": row["event"],
                    "message": row["message"],
                    "level": row["level"],
                    "run_id": row["run_id"],
                    "meta": row["meta"],
                    "source": row["source"],
                }
                for row in cur.fetchall()
            ]
            capital_summary = {
                "cash": None,
                "invested": None,
                "mtm": None,
                "equity": None,
                "pnl": None,
            }
            if active_run_id:
                # Scalar subqueries so one round-trip fetches all five figures.
                cur.execute(
                    """
                    SELECT
                        (SELECT cash FROM paper_broker_account WHERE user_id = %s AND run_id = %s LIMIT 1) AS cash,
                        (SELECT total_invested FROM engine_state_paper WHERE user_id = %s AND run_id = %s LIMIT 1) AS invested,
                        (SELECT portfolio_value FROM mtm_ledger WHERE user_id = %s AND run_id = %s ORDER BY "timestamp" DESC LIMIT 1) AS mtm,
                        (SELECT equity FROM paper_equity_curve WHERE user_id = %s AND run_id = %s ORDER BY "timestamp" DESC LIMIT 1) AS equity,
                        (SELECT pnl FROM paper_equity_curve WHERE user_id = %s AND run_id = %s ORDER BY "timestamp" DESC LIMIT 1) AS pnl
                    """,
                    (user_id, active_run_id) * 5,
                )
                row = cur.fetchone()
                if row:
                    # Same dict-row fix as for events above.
                    capital_summary = {
                        "cash": row["cash"],
                        "invested": row["invested"],
                        "mtm": row["mtm"],
                        "equity": row["equity"],
                        "pnl": row["pnl"],
                    }
            return {
                "user": user,
                "runs": runs,
                "current_config": config,
                "events": events,
                "capital_summary": capital_summary,
            }
def get_runs(page: int, page_size: int, status: str | None, mode: str | None, user_id: str | None):
    """Page through admin_run_metrics, optionally filtered by status/mode/user."""
    page, page_size, offset = _paginate(page, page_size)
    conditions = []
    params = []
    # Build the WHERE clause from whichever filters were supplied.
    for column, value in (("status", status), ("mode", mode), ("user_id", user_id)):
        if value:
            conditions.append(f"{column} = %s")
            params.append(value)
    where = f"WHERE {' AND '.join(conditions)}" if conditions else ""
    with db_connection() as conn:
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            cur.execute(f"SELECT COUNT(*) FROM admin_run_metrics {where}", params)
            total = cur.fetchone()["count"]
            cur.execute(
                f"""
                SELECT *
                FROM admin_run_metrics
                {where}
                ORDER BY created_at DESC NULLS LAST
                LIMIT %s OFFSET %s
                """,
                (*params, page_size, offset),
            )
            runs = cur.fetchall()
    return {
        "page": page,
        "page_size": page_size,
        "total": total,
        "runs": runs,
    }
def get_run_detail(run_id: str):
    """Return one run's full admin payload, or None if the run is unknown.

    Bundles the run metrics row, its config, engine status, paper-engine
    state, the latest ledger events / orders / trades, and per-run
    invariant-violation counts.
    """
    with db_connection() as conn:
        # RealDictCursor: every row below is a dict keyed by column name.
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            cur.execute("SELECT * FROM admin_run_metrics WHERE run_id = %s", (run_id,))
            run = cur.fetchone()
            if not run:
                return None
            user_id = run["user_id"]
            cur.execute(
                """
                SELECT strategy, sip_amount, sip_frequency_value, sip_frequency_unit,
                       mode, broker, active, frequency, frequency_days, unit, next_run
                FROM strategy_config
                WHERE user_id = %s AND run_id = %s
                LIMIT 1
                """,
                (user_id, run_id),
            )
            config = cur.fetchone()
            cur.execute(
                """
                SELECT status, last_updated
                FROM engine_status
                WHERE user_id = %s AND run_id = %s
                LIMIT 1
                """,
                (user_id, run_id),
            )
            engine_status = cur.fetchone()
            cur.execute(
                """
                SELECT initial_cash, cash, total_invested, nifty_units, gold_units,
                       last_sip_ts, last_run, sip_frequency_value, sip_frequency_unit
                FROM engine_state_paper
                WHERE user_id = %s AND run_id = %s
                LIMIT 1
                """,
                (user_id, run_id),
            )
            state = cur.fetchone()
            state_snapshot = dict(state) if state else None
            cur.execute(
                """
                SELECT event, "timestamp", logical_time, nifty_units, gold_units, nifty_price, gold_price, amount
                FROM event_ledger
                WHERE user_id = %s AND run_id = %s
                ORDER BY "timestamp" DESC
                LIMIT 100
                """,
                (user_id, run_id),
            )
            ledger_events = cur.fetchall()
            cur.execute(
                """
                SELECT id, symbol, side, qty, price, status, "timestamp"
                FROM paper_order
                WHERE user_id = %s AND run_id = %s
                ORDER BY "timestamp" DESC
                LIMIT 50
                """,
                (user_id, run_id),
            )
            orders = cur.fetchall()
            cur.execute(
                """
                SELECT id, order_id, symbol, side, qty, price, "timestamp"
                FROM paper_trade
                WHERE user_id = %s AND run_id = %s
                ORDER BY "timestamp" DESC
                LIMIT 50
                """,
                (user_id, run_id),
            )
            trades = cur.fetchall()
            # Per-run integrity checks: duplicate logical times in the two
            # ledgers, negative cash, and non-positive order quantities.
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT logical_time FROM event_ledger
                    WHERE user_id = %s AND run_id = %s
                    GROUP BY logical_time, event
                    HAVING COUNT(*) > 1
                ) t
                """,
                (user_id, run_id),
            )
            dup_event = cur.fetchone()["count"]
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT logical_time FROM mtm_ledger
                    WHERE user_id = %s AND run_id = %s
                    GROUP BY logical_time
                    HAVING COUNT(*) > 1
                ) t
                """,
                (user_id, run_id),
            )
            dup_mtm = cur.fetchone()["count"]
            cur.execute(
                """
                SELECT COUNT(*) FROM paper_broker_account
                WHERE user_id = %s AND run_id = %s AND cash < 0
                """,
                (user_id, run_id),
            )
            neg_cash = cur.fetchone()["count"]
            cur.execute(
                """
                SELECT COUNT(*) FROM paper_order
                WHERE user_id = %s AND run_id = %s AND qty <= 0
                """,
                (user_id, run_id),
            )
            bad_qty = cur.fetchone()["count"]
            invariants = {
                "duplicate_event_logical_time": dup_event,
                "duplicate_mtm_logical_time": dup_mtm,
                "negative_cash": neg_cash,
                "invalid_qty": bad_qty,
            }
            return {
                "run": run,
                "config": dict(config) if config else None,
                "engine_status": dict(engine_status) if engine_status else None,
                "state_snapshot": state_snapshot,
                "ledger_events": ledger_events,
                "orders": orders,
                "trades": trades,
                "invariants": invariants,
            }
def get_invariants(stale_minutes: int = 30):
    """Compute platform-wide data-integrity violation counts.

    Args:
        stale_minutes: a RUNNING run with no activity newer than this many
            minutes is counted as stale.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(minutes=stale_minutes)
    with db_connection() as conn:
        # Plain (tuple) cursor: positional indexing below is correct.
        with conn.cursor() as cur:
            # Users with more than one RUNNING run at once.
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT user_id FROM strategy_run
                    WHERE status = 'RUNNING'
                    GROUP BY user_id
                    HAVING COUNT(*) > 1
                ) t
                """
            )
            running_runs_per_user_violations = cur.fetchone()[0]
            # Child rows whose (user_id, run_id) has no strategy_run parent.
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT user_id, run_id FROM engine_state
                    UNION ALL
                    SELECT user_id, run_id FROM engine_status
                    UNION ALL
                    SELECT user_id, run_id FROM paper_order
                    UNION ALL
                    SELECT user_id, run_id FROM paper_trade
                ) t
                LEFT JOIN strategy_run sr
                ON sr.user_id = t.user_id AND sr.run_id = t.run_id
                WHERE sr.run_id IS NULL
                """
            )
            orphan_rows = cur.fetchone()[0]
            # Duplicate logical times in the event ledger (per run and event).
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT user_id, run_id, logical_time, event
                    FROM event_ledger
                    GROUP BY user_id, run_id, logical_time, event
                    HAVING COUNT(*) > 1
                ) t
                """
            )
            dup_event = cur.fetchone()[0]
            # Duplicate logical times in the MTM ledger (per run).
            cur.execute(
                """
                SELECT COUNT(*) FROM (
                    SELECT user_id, run_id, logical_time
                    FROM mtm_ledger
                    GROUP BY user_id, run_id, logical_time
                    HAVING COUNT(*) > 1
                ) t
                """
            )
            dup_mtm = cur.fetchone()[0]
            cur.execute(
                "SELECT COUNT(*) FROM paper_broker_account WHERE cash < 0"
            )
            negative_cash = cur.fetchone()[0]
            cur.execute(
                "SELECT COUNT(*) FROM paper_order WHERE qty <= 0"
            )
            invalid_qty = cur.fetchone()[0]
            # RUNNING runs with no event/log/ledger activity since the cutoff.
            cur.execute(
                """
                SELECT COUNT(*) FROM strategy_run sr
                LEFT JOIN (
                    SELECT user_id, run_id, MAX(ts) AS last_ts
                    FROM (
                        SELECT user_id, run_id, ts FROM engine_event
                        UNION ALL
                        SELECT user_id, run_id, ts FROM strategy_log
                        UNION ALL
                        SELECT user_id, run_id, "timestamp" AS ts FROM event_ledger
                    ) t
                    GROUP BY user_id, run_id
                ) activity
                ON activity.user_id = sr.user_id AND activity.run_id = sr.run_id
                WHERE sr.status = 'RUNNING' AND (activity.last_ts IS NULL OR activity.last_ts < %s)
                """,
                (cutoff,),
            )
            stale_running_runs = cur.fetchone()[0]
    return {
        "running_runs_per_user_violations": running_runs_per_user_violations,
        "orphan_rows": orphan_rows,
        "duplicate_logical_time": dup_event + dup_mtm,
        "negative_cash": negative_cash,
        "invalid_qty": invalid_qty,
        "stale_running_runs": stale_running_runs,
    }
def get_support_tickets(page: int, page_size: int):
    """Page through support tickets, newest first.

    Removed two no-op self-assignments on created_at/updated_at that the
    original carried (``ticket["created_at"] = ticket["created_at"]``).
    """
    page, page_size, offset = _paginate(page, page_size)
    with db_connection() as conn:
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            cur.execute("SELECT COUNT(*) FROM support_ticket")
            total = cur.fetchone()["count"]
            cur.execute(
                """
                SELECT id AS ticket_id, name, email, subject, message, status, created_at, updated_at
                FROM support_ticket
                ORDER BY created_at DESC NULLS LAST
                LIMIT %s OFFSET %s
                """,
                (page_size, offset),
            )
            rows = cur.fetchall()
    tickets = []
    for row in rows:
        ticket = dict(row)
        # The API model declares ticket_id as str; the column may be numeric/UUID.
        ticket["ticket_id"] = str(ticket.get("ticket_id"))
        tickets.append(ticket)
    return {
        "page": page,
        "page_size": page_size,
        "total": total,
        "tickets": tickets,
    }
def delete_support_ticket(ticket_id: str) -> dict | None:
    """Delete a support ticket by id; return a receipt dict, or None if absent."""
    with db_connection() as conn:
        # `with conn` commits the DELETE on normal exit.
        with conn:
            with conn.cursor() as cur:
                cur.execute("DELETE FROM support_ticket WHERE id = %s", (ticket_id,))
                deleted = cur.rowcount
    return None if deleted == 0 else {"ticket_id": ticket_id, "deleted": True}
def _hash_value(value: str | None) -> str | None:
if value is None:
return None
return hashlib.sha256(value.encode("utf-8")).hexdigest()
def delete_user_hard(user_id: str, admin_user: dict):
    """Permanently delete a user and record an audit row, in one transaction.

    Counts the user's rows across all related tables first (for the receipt),
    deletes the app_user row, then writes a hashed audit entry.
    NOTE(review): only app_user is deleted here — presumably the child tables
    are cleaned up via ON DELETE CASCADE foreign keys; confirm the schema,
    otherwise the counted rows are orphaned, not deleted.

    Returns:
        None when the user does not exist, otherwise
        {"user_id", "deleted": per-table counts, "audit_id"}.
    """
    # (table name, pre-delete count query) pairs used to build the receipt.
    table_counts = [
        ("app_user", "SELECT COUNT(*) FROM app_user WHERE id = %s"),
        ("app_session", "SELECT COUNT(*) FROM app_session WHERE user_id = %s"),
        ("user_broker", "SELECT COUNT(*) FROM user_broker WHERE user_id = %s"),
        ("zerodha_session", "SELECT COUNT(*) FROM zerodha_session WHERE user_id = %s"),
        ("zerodha_request_token", "SELECT COUNT(*) FROM zerodha_request_token WHERE user_id = %s"),
        ("strategy_run", "SELECT COUNT(*) FROM strategy_run WHERE user_id = %s"),
        ("strategy_config", "SELECT COUNT(*) FROM strategy_config WHERE user_id = %s"),
        ("strategy_log", "SELECT COUNT(*) FROM strategy_log WHERE user_id = %s"),
        ("engine_status", "SELECT COUNT(*) FROM engine_status WHERE user_id = %s"),
        ("engine_state", "SELECT COUNT(*) FROM engine_state WHERE user_id = %s"),
        ("engine_state_paper", "SELECT COUNT(*) FROM engine_state_paper WHERE user_id = %s"),
        ("engine_event", "SELECT COUNT(*) FROM engine_event WHERE user_id = %s"),
        ("paper_broker_account", "SELECT COUNT(*) FROM paper_broker_account WHERE user_id = %s"),
        ("paper_position", "SELECT COUNT(*) FROM paper_position WHERE user_id = %s"),
        ("paper_order", "SELECT COUNT(*) FROM paper_order WHERE user_id = %s"),
        ("paper_trade", "SELECT COUNT(*) FROM paper_trade WHERE user_id = %s"),
        ("paper_equity_curve", "SELECT COUNT(*) FROM paper_equity_curve WHERE user_id = %s"),
        ("mtm_ledger", "SELECT COUNT(*) FROM mtm_ledger WHERE user_id = %s"),
        ("event_ledger", "SELECT COUNT(*) FROM event_ledger WHERE user_id = %s"),
    ]
    with db_connection() as conn:
        # `with conn` keeps count + delete + audit atomic.
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT id, username FROM app_user WHERE id = %s",
                    (user_id,),
                )
                row = cur.fetchone()
                if not row:
                    return None
                target_username = row[1]
                counts = {}
                for name, query in table_counts:
                    cur.execute(query, (user_id,))
                    counts[name] = cur.fetchone()[0]
                cur.execute("DELETE FROM app_user WHERE id = %s", (user_id,))
                if cur.rowcount == 0:
                    return None
                audit_meta = {"deleted": counts, "hard": True}
                # Audit stores SHA-256 hashes of the identifiers, not the
                # raw values (see _hash_value).
                cur.execute(
                    """
                    INSERT INTO admin_audit_log
                    (actor_user_hash, target_user_hash, target_username_hash, action, meta)
                    VALUES (%s, %s, %s, %s, %s)
                    RETURNING id
                    """,
                    (
                        _hash_value(admin_user["id"]),
                        _hash_value(user_id),
                        _hash_value(target_username),
                        "HARD_DELETE_USER",
                        Json(audit_meta),
                    ),
                )
                audit_id = cur.fetchone()[0]
                return {
                    "user_id": user_id,
                    "deleted": counts,
                    "audit_id": audit_id,
                }
def hard_reset_user_data(user_id: str, admin_user: dict):
    """Wipe all strategy/trading data for a user while keeping the account.

    Stops a locally-managed running engine first (unless ENGINE_EXTERNAL is
    set), counts rows per table for the audit trail, deletes the data, and
    records a HARD_RESET_USER entry in admin_audit_log.

    Returns None when the user does not exist; otherwise a summary dict with
    the per-table counts and the audit-log row id.
    """
    # COUNT queries, keyed by table name, captured before deletion for audit.
    count_queries = [
        ("strategy_run", "SELECT COUNT(*) FROM strategy_run WHERE user_id = %s"),
        ("strategy_config", "SELECT COUNT(*) FROM strategy_config WHERE user_id = %s"),
        ("strategy_log", "SELECT COUNT(*) FROM strategy_log WHERE user_id = %s"),
        ("engine_status", "SELECT COUNT(*) FROM engine_status WHERE user_id = %s"),
        ("engine_state", "SELECT COUNT(*) FROM engine_state WHERE user_id = %s"),
        ("engine_state_paper", "SELECT COUNT(*) FROM engine_state_paper WHERE user_id = %s"),
        ("engine_event", "SELECT COUNT(*) FROM engine_event WHERE user_id = %s"),
        ("paper_broker_account", "SELECT COUNT(*) FROM paper_broker_account WHERE user_id = %s"),
        ("paper_position", "SELECT COUNT(*) FROM paper_position WHERE user_id = %s"),
        ("paper_order", "SELECT COUNT(*) FROM paper_order WHERE user_id = %s"),
        ("paper_trade", "SELECT COUNT(*) FROM paper_trade WHERE user_id = %s"),
        ("paper_equity_curve", "SELECT COUNT(*) FROM paper_equity_curve WHERE user_id = %s"),
        ("mtm_ledger", "SELECT COUNT(*) FROM mtm_ledger WHERE user_id = %s"),
        ("event_ledger", "SELECT COUNT(*) FROM event_ledger WHERE user_id = %s"),
    ]
    # Deletion order matters: child/leaf tables first, strategy_run last.
    delete_order = (
        "strategy_log",
        "engine_event",
        "paper_equity_curve",
        "paper_trade",
        "paper_order",
        "paper_position",
        "paper_broker_account",
        "mtm_ledger",
        "event_ledger",
        "engine_state_paper",
        "engine_state",
        "engine_status",
        "strategy_config",
        "strategy_run",
    )
    external_engine = os.getenv("ENGINE_EXTERNAL", "").strip().lower() in {"1", "true", "yes"}
    if get_running_run_id(user_id) and not external_engine:
        stop_engine(user_id, timeout=15.0)
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT id, username FROM app_user WHERE id = %s",
                    (user_id,),
                )
                user_row = cur.fetchone()
                if not user_row:
                    return None
                target_username = user_row[1]
                counts = {}
                for table, query in count_queries:
                    cur.execute(query, (user_id,))
                    counts[table] = cur.fetchone()[0]
                for table in delete_order:
                    cur.execute(f"DELETE FROM {table} WHERE user_id = %s", (user_id,))
                cur.execute(
                    """
                    INSERT INTO admin_audit_log
                        (actor_user_hash, target_user_hash, target_username_hash, action, meta)
                    VALUES (%s, %s, %s, %s, %s)
                    RETURNING id
                    """,
                    (
                        _hash_value(admin_user["id"]),
                        _hash_value(user_id),
                        _hash_value(target_username),
                        "HARD_RESET_USER",
                        Json({"reset": counts, "hard": True}),
                    ),
                )
                audit_id = cur.fetchone()[0]
    return {
        "user_id": user_id,
        "deleted": counts,
        "audit_id": audit_id,
    }

296
app/broker_store.py Normal file
View File

@ -0,0 +1,296 @@
from datetime import datetime, timezone
from app.services.crypto_service import decrypt_value, encrypt_value
from app.services.db import db_transaction
def _row_to_entry(row):
(
user_id,
broker,
connected,
access_token,
connected_at,
api_key,
api_secret,
user_name,
broker_user_id,
auth_state,
pending_broker,
pending_api_key,
pending_api_secret,
pending_started_at,
) = row
entry = {
"broker": broker,
"connected": bool(connected),
"connected_at": connected_at,
"api_key": api_key,
"auth_state": auth_state,
"user_name": user_name,
"broker_user_id": broker_user_id,
}
if pending_broker or pending_api_key or pending_api_secret or pending_started_at:
pending = {
"broker": pending_broker,
"api_key": pending_api_key,
"api_secret": decrypt_value(pending_api_secret)
if pending_api_secret
else None,
"started_at": pending_started_at,
}
entry["pending"] = pending
return entry
def load_user_brokers():
    """Return a mapping of user_id -> broker entry for every stored row."""
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT user_id, broker, connected, access_token, connected_at,
                   api_key, api_secret, user_name, broker_user_id, auth_state,
                   pending_broker, pending_api_key, pending_api_secret, pending_started_at
            FROM user_broker
            """
        )
        all_rows = cur.fetchall()
    return {r[0]: _row_to_entry(r) for r in all_rows}
def save_user_brokers(data):
    """Upsert every (user_id, entry) pair in *data* into user_broker.

    Secrets (access_token, api_secret, pending api_secret) are encrypted
    before storage; falsy secrets are persisted as NULL.
    """
    with db_transaction() as cur:
        for user_id, entry in data.items():
            # Normalize the optional pending sub-dict once per entry.
            pending = entry.get("pending") or {}
            access_token = entry.get("access_token")
            api_secret = entry.get("api_secret")
            pending_secret = pending.get("api_secret")
            cur.execute(
                """
                INSERT INTO user_broker (
                    user_id, broker, connected, access_token, connected_at,
                    api_key, api_secret, user_name, broker_user_id, auth_state,
                    pending_broker, pending_api_key, pending_api_secret, pending_started_at
                )
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (user_id)
                DO UPDATE SET
                    broker = EXCLUDED.broker,
                    connected = EXCLUDED.connected,
                    access_token = EXCLUDED.access_token,
                    connected_at = EXCLUDED.connected_at,
                    api_key = EXCLUDED.api_key,
                    api_secret = EXCLUDED.api_secret,
                    user_name = EXCLUDED.user_name,
                    broker_user_id = EXCLUDED.broker_user_id,
                    auth_state = EXCLUDED.auth_state,
                    pending_broker = EXCLUDED.pending_broker,
                    pending_api_key = EXCLUDED.pending_api_key,
                    pending_api_secret = EXCLUDED.pending_api_secret,
                    pending_started_at = EXCLUDED.pending_started_at
                """,
                (
                    user_id,
                    entry.get("broker"),
                    bool(entry.get("connected")),
                    encrypt_value(access_token) if access_token else None,
                    entry.get("connected_at"),
                    entry.get("api_key"),
                    encrypt_value(api_secret) if api_secret else None,
                    entry.get("user_name"),
                    entry.get("broker_user_id"),
                    entry.get("auth_state"),
                    pending.get("broker"),
                    pending.get("api_key"),
                    encrypt_value(pending_secret) if pending_secret else None,
                    pending.get("started_at"),
                ),
            )
def now_utc():
    """Current moment as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
def get_user_broker(user_id: str):
    """Fetch a single user's broker entry, or None when no row exists."""
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT user_id, broker, connected, access_token, connected_at,
                   api_key, api_secret, user_name, broker_user_id, auth_state,
                   pending_broker, pending_api_key, pending_api_secret, pending_started_at
            FROM user_broker
            WHERE user_id = %s
            """,
            (user_id,),
        )
        row = cur.fetchone()
    return _row_to_entry(row) if row else None
def clear_user_broker(user_id: str):
    """Remove the user's broker row entirely (no-op when absent)."""
    delete_sql = "DELETE FROM user_broker WHERE user_id = %s"
    with db_transaction() as cur:
        cur.execute(delete_sql, (user_id,))
def set_pending_broker(user_id: str, broker: str, api_key: str, api_secret: str):
    """Record the start of a broker connect flow for *user_id*.

    Writes the credentials both to the pending_* columns (handshake in
    flight) and to the main api_key/api_secret columns, and marks the row
    PENDING. Returns the plaintext pending payload for the caller.
    """
    started_at = now_utc()
    # Encrypt once and reuse the ciphertext: the original called
    # encrypt_value(api_secret) twice, doing the work twice and — if the
    # cipher is non-deterministic — storing two different ciphertexts for
    # the same secret in api_secret vs pending_api_secret.
    encrypted_secret = encrypt_value(api_secret)
    with db_transaction() as cur:
        cur.execute(
            """
            INSERT INTO user_broker (
                user_id, pending_broker, pending_api_key, pending_api_secret, pending_started_at,
                api_key, api_secret, auth_state
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            ON CONFLICT (user_id)
            DO UPDATE SET
                pending_broker = EXCLUDED.pending_broker,
                pending_api_key = EXCLUDED.pending_api_key,
                pending_api_secret = EXCLUDED.pending_api_secret,
                pending_started_at = EXCLUDED.pending_started_at,
                api_key = EXCLUDED.api_key,
                api_secret = EXCLUDED.api_secret,
                auth_state = EXCLUDED.auth_state
            """,
            (
                user_id,
                broker,
                api_key,
                encrypted_secret,
                started_at,
                api_key,
                encrypted_secret,
                "PENDING",
            ),
        )
    return {
        "broker": broker,
        "api_key": api_key,
        "api_secret": api_secret,
        "started_at": started_at,
    }
def get_pending_broker(user_id: str):
    """Return the in-flight connect attempt for the user, if fully recorded.

    Yields None when there is no row or when any of broker/key/secret is
    missing; the stored secret is decrypted before being returned.
    """
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT pending_broker, pending_api_key, pending_api_secret, pending_started_at
            FROM user_broker
            WHERE user_id = %s
            """,
            (user_id,),
        )
        row = cur.fetchone()
    if not row:
        return None
    pending_broker, pending_key, pending_secret, pending_started = row
    if not (pending_broker and pending_key and pending_secret):
        return None
    return {
        "broker": pending_broker,
        "api_key": pending_key,
        "api_secret": decrypt_value(pending_secret),
        "started_at": pending_started,
    }
def get_broker_credentials(user_id: str):
    """Return decrypted API credentials for the user, or None.

    Prefers the confirmed api_key/api_secret columns, falling back to the
    pending_* pair.
    NOTE(review): key and secret fall back independently, so a confirmed
    key could be paired with a pending secret — confirm this is intended.
    """
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT api_key, api_secret, pending_api_key, pending_api_secret
            FROM user_broker
            WHERE user_id = %s
            """,
            (user_id,),
        )
        row = cur.fetchone()
    if not row:
        return None
    main_key, main_secret, pending_key, pending_secret = row
    chosen_key = main_key or pending_key
    chosen_secret = main_secret or pending_secret
    if not (chosen_key and chosen_secret):
        return None
    return {
        "api_key": chosen_key,
        "api_secret": decrypt_value(chosen_secret),
    }
def set_broker_auth_state(user_id: str, auth_state: str):
    """Persist a new auth_state value on the user's broker row."""
    params = (auth_state, user_id)
    with db_transaction() as cur:
        cur.execute(
            """
            UPDATE user_broker
            SET auth_state = %s
            WHERE user_id = %s
            """,
            params,
        )
def set_connected_broker(
    user_id: str,
    broker: str,
    access_token: str,
    api_key: str | None = None,
    api_secret: str | None = None,
    user_name: str | None = None,
    broker_user_id: str | None = None,
    auth_state: str | None = None,
):
    """Mark the user as connected to *broker*, clearing any pending state.

    Secrets are encrypted before storage; the returned dict echoes the
    plaintext values the caller supplied plus the connection timestamp.
    """
    connected_at = now_utc()
    # Ordered to match the INSERT column list below.
    row_values = (
        user_id,
        broker,
        True,
        encrypt_value(access_token),
        connected_at,
        api_key,
        encrypt_value(api_secret) if api_secret else None,
        user_name,
        broker_user_id,
        auth_state,
    )
    with db_transaction() as cur:
        cur.execute(
            """
            INSERT INTO user_broker (
                user_id, broker, connected, access_token, connected_at,
                api_key, api_secret, user_name, broker_user_id, auth_state,
                pending_broker, pending_api_key, pending_api_secret, pending_started_at
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NULL, NULL, NULL, NULL)
            ON CONFLICT (user_id)
            DO UPDATE SET
                broker = EXCLUDED.broker,
                connected = EXCLUDED.connected,
                access_token = EXCLUDED.access_token,
                connected_at = EXCLUDED.connected_at,
                api_key = EXCLUDED.api_key,
                api_secret = EXCLUDED.api_secret,
                user_name = EXCLUDED.user_name,
                broker_user_id = EXCLUDED.broker_user_id,
                auth_state = EXCLUDED.auth_state,
                pending_broker = NULL,
                pending_api_key = NULL,
                pending_api_secret = NULL,
                pending_started_at = NULL
            """,
            row_values,
        )
    return {
        "broker": broker,
        "connected": True,
        "access_token": access_token,
        "connected_at": connected_at,
        "api_key": api_key,
        "api_secret": api_secret,
        "user_name": user_name,
        "broker_user_id": broker_user_id,
        "auth_state": auth_state,
    }

491
app/db_models.py Normal file
View File

@ -0,0 +1,491 @@
from sqlalchemy import (
BigInteger,
Boolean,
CheckConstraint,
Column,
Date,
DateTime,
ForeignKey,
ForeignKeyConstraint,
Index,
Integer,
Numeric,
String,
Text,
UniqueConstraint,
func,
text,
)
from sqlalchemy.dialects.postgresql import JSONB
from app.services.db import Base
class AppUser(Base):
    """Application account row; role and admin flags gate admin endpoints."""
    __tablename__ = "app_user"
    id = Column(String, primary_key=True)
    username = Column(String, nullable=False, unique=True)
    password_hash = Column(String, nullable=False)
    is_admin = Column(Boolean, nullable=False, server_default=text("false"))
    is_super_admin = Column(Boolean, nullable=False, server_default=text("false"))
    # Role string constrained to the three values below; defaults to USER.
    role = Column(String, nullable=False, server_default=text("'USER'"))
    __table_args__ = (
        CheckConstraint("role IN ('USER','ADMIN','SUPER_ADMIN')", name="chk_app_user_role"),
        Index("idx_app_user_role", "role"),
        Index("idx_app_user_is_admin", "is_admin"),
        Index("idx_app_user_is_super_admin", "is_super_admin"),
    )
class AppSession(Base):
    """Server-side login session; rows cascade-delete with their user."""
    __tablename__ = "app_session"
    id = Column(String, primary_key=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    created_at = Column(DateTime(timezone=True), nullable=False)
    last_seen_at = Column(DateTime(timezone=True))
    expires_at = Column(DateTime(timezone=True), nullable=False)
    __table_args__ = (
        Index("idx_app_session_user_id", "user_id"),
        Index("idx_app_session_expires_at", "expires_at"),
    )
class UserBroker(Base):
    """Per-user broker link (one row per user).

    Adds the api_secret and auth_state columns that the raw SQL in
    app/broker_store.py reads and writes on user_broker (SELECT/UPDATE of
    api_secret and auth_state); the model previously omitted them, so a
    schema created from this metadata would not satisfy those queries.
    """
    __tablename__ = "user_broker"
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), primary_key=True)
    broker = Column(String)
    connected = Column(Boolean, nullable=False, server_default=text("false"))
    access_token = Column(Text)
    connected_at = Column(DateTime(timezone=True))
    api_key = Column(Text)
    # Encrypted API secret, written by broker_store via encrypt_value().
    api_secret = Column(Text)
    user_name = Column(Text)
    broker_user_id = Column(Text)
    # Connection lifecycle marker, e.g. 'PENDING' / 'DISCONNECTED'.
    auth_state = Column(Text)
    pending_broker = Column(Text)
    pending_api_key = Column(Text)
    pending_api_secret = Column(Text)
    pending_started_at = Column(DateTime(timezone=True))
    __table_args__ = (
        Index("idx_user_broker_broker", "broker"),
        Index("idx_user_broker_connected", "connected"),
    )
class ZerodhaSession(Base):
    """History of Zerodha account links for a user."""
    __tablename__ = "zerodha_session"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    linked_at = Column(DateTime(timezone=True), nullable=False)
    api_key = Column(Text)
    access_token = Column(Text)
    request_token = Column(Text)
    user_name = Column(Text)
    broker_user_id = Column(Text)
    __table_args__ = (
        Index("idx_zerodha_session_user_id", "user_id"),
        Index("idx_zerodha_session_linked_at", "linked_at"),
    )
class ZerodhaRequestToken(Base):
    """Latest Zerodha OAuth request token, one row per user."""
    __tablename__ = "zerodha_request_token"
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), primary_key=True)
    request_token = Column(Text, nullable=False)
class StrategyRun(Base):
    """One strategy execution; a partial unique index allows at most one
    RUNNING row per user."""
    __tablename__ = "strategy_run"
    run_id = Column(String, primary_key=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    created_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now())
    started_at = Column(DateTime(timezone=True))
    stopped_at = Column(DateTime(timezone=True))
    status = Column(String, nullable=False)
    strategy = Column(String)
    mode = Column(String)
    broker = Column(String)
    meta = Column(JSONB)
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_strategy_run_user_run"),
        CheckConstraint("status IN ('RUNNING','STOPPED','ERROR')", name="chk_strategy_run_status"),
        Index("idx_strategy_run_user_status", "user_id", "status"),
        Index("idx_strategy_run_user_created", "user_id", "created_at"),
        # Partial unique index: only one run may be RUNNING per user.
        Index(
            "uq_one_running_run_per_user",
            "user_id",
            unique=True,
            postgresql_where=text("status = 'RUNNING'"),
        ),
    )
class StrategyConfig(Base):
    """SIP/strategy parameters captured for a specific run (one per
    user/run pair)."""
    __tablename__ = "strategy_config"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    strategy = Column(String)
    sip_amount = Column(Numeric)
    sip_frequency_value = Column(Integer)
    sip_frequency_unit = Column(String)
    mode = Column(String)
    broker = Column(String)
    active = Column(Boolean)
    frequency = Column(Text)
    frequency_days = Column(Integer)
    unit = Column(String)
    next_run = Column(DateTime(timezone=True))
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, ForeignKey("strategy_run.run_id", ondelete="CASCADE"), nullable=False)
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_strategy_config_user_run"),
    )
class StrategyLog(Base):
    """Append-only log lines emitted by a strategy run."""
    __tablename__ = "strategy_log"
    seq = Column(BigInteger, primary_key=True)
    ts = Column(DateTime(timezone=True), nullable=False)
    level = Column(String)
    category = Column(String)
    event = Column(String)
    message = Column(Text)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, ForeignKey("strategy_run.run_id", ondelete="CASCADE"), nullable=False)
    meta = Column(JSONB)
    __table_args__ = (
        Index("idx_strategy_log_ts", "ts"),
        Index("idx_strategy_log_event", "event"),
        Index("idx_strategy_log_user_run_ts", "user_id", "run_id", "ts"),
    )
class EngineStatus(Base):
    """Latest engine status per (user, run), FK'd to strategy_run."""
    __tablename__ = "engine_status"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    status = Column(String, nullable=False)
    last_updated = Column(DateTime(timezone=True), nullable=False)
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_engine_status_user_run"),
        # Composite FK ties the row to the owning strategy_run.
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        Index("idx_engine_status_user_run", "user_id", "run_id"),
    )
class EngineState(Base):
    """Live-mode engine portfolio state, one row per (user, run)."""
    __tablename__ = "engine_state"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    total_invested = Column(Numeric)
    nifty_units = Column(Numeric)
    gold_units = Column(Numeric)
    last_sip_ts = Column(DateTime(timezone=True))
    last_run = Column(DateTime(timezone=True))
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_engine_state_user_run"),
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
    )
class EngineStatePaper(Base):
    """Paper-mode engine state: cash balance plus portfolio units."""
    __tablename__ = "engine_state_paper"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    initial_cash = Column(Numeric)
    cash = Column(Numeric)
    total_invested = Column(Numeric)
    nifty_units = Column(Numeric)
    gold_units = Column(Numeric)
    last_sip_ts = Column(DateTime(timezone=True))
    last_run = Column(DateTime(timezone=True))
    sip_frequency_value = Column(Integer)
    sip_frequency_unit = Column(String)
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_engine_state_paper_user_run"),
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        # DB-level guard: paper cash can never go negative.
        CheckConstraint("cash >= 0", name="chk_engine_state_paper_cash_non_negative"),
    )
class EngineEvent(Base):
    """Structured events emitted by the engine during a run."""
    __tablename__ = "engine_event"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    ts = Column(DateTime(timezone=True), nullable=False)
    event = Column(String)
    data = Column(JSONB)
    message = Column(Text)
    meta = Column(JSONB)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, ForeignKey("strategy_run.run_id", ondelete="CASCADE"), nullable=False)
    __table_args__ = (
        Index("idx_engine_event_ts", "ts"),
        Index("idx_engine_event_user_run_ts", "user_id", "run_id", "ts"),
    )
class PaperBrokerAccount(Base):
    """Simulated broker cash account for a paper run."""
    __tablename__ = "paper_broker_account"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    cash = Column(Numeric, nullable=False)
    __table_args__ = (
        UniqueConstraint("user_id", "run_id", name="uq_paper_broker_account_user_run"),
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        CheckConstraint("cash >= 0", name="chk_paper_broker_cash_non_negative"),
    )
class PaperPosition(Base):
    """Open paper position, keyed by (user, run, symbol); qty must stay
    positive — closed positions are deleted rather than zeroed."""
    __tablename__ = "paper_position"
    user_id = Column(String, primary_key=True)
    run_id = Column(String, primary_key=True)
    symbol = Column(String, primary_key=True)
    qty = Column(Numeric, nullable=False)
    avg_price = Column(Numeric)
    last_price = Column(Numeric)
    updated_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now())
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        CheckConstraint("qty > 0", name="chk_paper_position_qty_positive"),
        UniqueConstraint("user_id", "run_id", "symbol", name="uq_paper_position_scope"),
        Index("idx_paper_position_user_run", "user_id", "run_id"),
    )
class PaperOrder(Base):
    """Simulated order; the (user, run, logical_time, symbol, side) unique
    key makes order writes idempotent under replay."""
    __tablename__ = "paper_order"
    id = Column(String, primary_key=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    symbol = Column(String, nullable=False)
    side = Column(String, nullable=False)
    qty = Column(Numeric, nullable=False)
    price = Column(Numeric)
    status = Column(String, nullable=False)
    # Wall-clock time vs. the engine's simulated ("logical") time.
    timestamp = Column("timestamp", DateTime(timezone=True), nullable=False)
    logical_time = Column(DateTime(timezone=True), nullable=False)
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        UniqueConstraint("user_id", "run_id", "id", name="uq_paper_order_scope_id"),
        UniqueConstraint(
            "user_id",
            "run_id",
            "logical_time",
            "symbol",
            "side",
            name="uq_paper_order_logical_key",
        ),
        CheckConstraint("qty > 0", name="chk_paper_order_qty_positive"),
        CheckConstraint("price >= 0", name="chk_paper_order_price_non_negative"),
        Index("idx_paper_order_ts", "timestamp"),
        Index("idx_paper_order_user_run_ts", "user_id", "run_id", "timestamp"),
    )
class PaperTrade(Base):
    """Fill produced by a paper order; cascades with both the run and the
    originating order."""
    __tablename__ = "paper_trade"
    id = Column(String, primary_key=True)
    order_id = Column(String)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    symbol = Column(String, nullable=False)
    side = Column(String, nullable=False)
    qty = Column(Numeric, nullable=False)
    price = Column(Numeric, nullable=False)
    # Wall-clock time vs. the engine's simulated ("logical") time.
    timestamp = Column("timestamp", DateTime(timezone=True), nullable=False)
    logical_time = Column(DateTime(timezone=True), nullable=False)
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        # Composite FK back to the scoped paper_order row.
        ForeignKeyConstraint(
            ["user_id", "run_id", "order_id"],
            ["paper_order.user_id", "paper_order.run_id", "paper_order.id"],
            ondelete="CASCADE",
        ),
        UniqueConstraint("user_id", "run_id", "id", name="uq_paper_trade_scope_id"),
        UniqueConstraint(
            "user_id",
            "run_id",
            "logical_time",
            "symbol",
            "side",
            name="uq_paper_trade_logical_key",
        ),
        CheckConstraint("qty > 0", name="chk_paper_trade_qty_positive"),
        CheckConstraint("price >= 0", name="chk_paper_trade_price_non_negative"),
        Index("idx_paper_trade_ts", "timestamp"),
        Index("idx_paper_trade_user_run_ts", "user_id", "run_id", "timestamp"),
    )
class PaperEquityCurve(Base):
    """Equity samples over simulated time; PK is (user, run, logical_time)."""
    __tablename__ = "paper_equity_curve"
    user_id = Column(String, primary_key=True)
    run_id = Column(String, primary_key=True)
    timestamp = Column("timestamp", DateTime(timezone=True), nullable=False)
    logical_time = Column(DateTime(timezone=True), primary_key=True)
    equity = Column(Numeric, nullable=False)
    pnl = Column(Numeric)
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        Index("idx_paper_equity_curve_ts", "timestamp"),
        Index("idx_paper_equity_curve_user_run_ts", "user_id", "run_id", "timestamp"),
    )
class MTMLedger(Base):
    """Mark-to-market snapshots per simulated tick; PK is
    (user, run, logical_time)."""
    __tablename__ = "mtm_ledger"
    user_id = Column(String, primary_key=True)
    run_id = Column(String, primary_key=True)
    timestamp = Column("timestamp", DateTime(timezone=True), nullable=False)
    logical_time = Column(DateTime(timezone=True), primary_key=True)
    nifty_units = Column(Numeric)
    gold_units = Column(Numeric)
    nifty_price = Column(Numeric)
    gold_price = Column(Numeric)
    nifty_value = Column(Numeric)
    gold_value = Column(Numeric)
    portfolio_value = Column(Numeric)
    total_invested = Column(Numeric)
    pnl = Column(Numeric)
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        Index("idx_mtm_ledger_ts", "timestamp"),
        Index("idx_mtm_ledger_user_run_ts", "user_id", "run_id", "timestamp"),
    )
class EventLedger(Base):
    """Business events (e.g. SIP executions) with an idempotency key on
    (user, run, event, logical_time)."""
    __tablename__ = "event_ledger"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    user_id = Column(String, ForeignKey("app_user.id", ondelete="CASCADE"), nullable=False)
    run_id = Column(String, nullable=False)
    timestamp = Column("timestamp", DateTime(timezone=True), nullable=False)
    logical_time = Column(DateTime(timezone=True), nullable=False)
    event = Column(String, nullable=False)
    nifty_units = Column(Numeric)
    gold_units = Column(Numeric)
    nifty_price = Column(Numeric)
    gold_price = Column(Numeric)
    amount = Column(Numeric)
    __table_args__ = (
        ForeignKeyConstraint(
            ["user_id", "run_id"],
            ["strategy_run.user_id", "strategy_run.run_id"],
            ondelete="CASCADE",
        ),
        UniqueConstraint("user_id", "run_id", "event", "logical_time", name="uq_event_ledger_event_time"),
        Index("idx_event_ledger_user_run_logical", "user_id", "run_id", "logical_time"),
        Index("idx_event_ledger_ts", "timestamp"),
        Index("idx_event_ledger_user_run_ts", "user_id", "run_id", "timestamp"),
    )
class MarketClose(Base):
    """Daily closing price per symbol; global data, not user-scoped."""
    __tablename__ = "market_close"
    symbol = Column(String, primary_key=True)
    date = Column(Date, primary_key=True)
    close = Column(Numeric, nullable=False)
    __table_args__ = (
        Index("idx_market_close_symbol", "symbol"),
        Index("idx_market_close_date", "date"),
    )
class AdminAuditLog(Base):
    """Audit trail of destructive admin actions; user identifiers are
    stored hashed rather than in plaintext."""
    __tablename__ = "admin_audit_log"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    ts = Column(DateTime(timezone=True), nullable=False, server_default=func.now())
    actor_user_hash = Column(Text, nullable=False)
    target_user_hash = Column(Text, nullable=False)
    target_username_hash = Column(Text)
    action = Column(Text, nullable=False)
    meta = Column(JSONB)
class AdminRoleAudit(Base):
    """Audit trail of role changes (old -> new) applied by an admin."""
    __tablename__ = "admin_role_audit"
    id = Column(BigInteger, primary_key=True, autoincrement=True)
    actor_user_id = Column(String, nullable=False)
    target_user_id = Column(String, nullable=False)
    old_role = Column(String, nullable=False)
    new_role = Column(String, nullable=False)
    changed_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now())

71
app/main.py Normal file
View File

@ -0,0 +1,71 @@
import os
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.routers.auth import router as auth_router
from app.routers.broker import router as broker_router
from app.routers.health import router as health_router
from app.routers.password_reset import router as password_reset_router
from app.routers.support_ticket import router as support_ticket_router
from app.routers.system import router as system_router
from app.routers.strategy import router as strategy_router
from app.routers.zerodha import router as zerodha_router, public_router as zerodha_public_router
from app.routers.paper import router as paper_router
from market import router as market_router
from paper_mtm import router as paper_mtm_router
from app.services.strategy_service import init_log_state, resume_running_runs
from app.admin_router import router as admin_router
from app.admin_role_service import bootstrap_super_admin
app = FastAPI(
    title="QuantFortune Backend",
    version="1.0"
)
# Comma-separated allow-list from the environment, e.g.
# CORS_ORIGINS="https://a.example,https://b.example".
cors_origins = [
    origin.strip()
    for origin in os.getenv("CORS_ORIGINS", "").split(",")
    if origin.strip()
]
if not cors_origins:
    # Local development defaults.
    cors_origins = [
        "http://localhost:3000",
        "http://127.0.0.1:3000",
    ]
cors_origin_regex = os.getenv("CORS_ORIGIN_REGEX", "").strip()
if not cors_origin_regex:
    # Default: accept any ngrok tunnel origin.
    # Fix: the previous pattern used "\\." inside raw strings, which in
    # regex means "a literal backslash followed by any character" — no URL
    # contains that, so the ngrok defaults could never match. "\." is the
    # correct escaped dot.
    cors_origin_regex = (
        r"https://.*\.ngrok-free\.dev"
        r"|https://.*\.ngrok-free\.app"
        r"|https://.*\.ngrok\.io"
    )
app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_origins,
    allow_origin_regex=cors_origin_regex or None,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.include_router(strategy_router)
app.include_router(auth_router)
app.include_router(broker_router)
app.include_router(zerodha_router)
app.include_router(zerodha_public_router)
app.include_router(paper_router)
app.include_router(market_router)
app.include_router(paper_mtm_router)
app.include_router(health_router)
app.include_router(system_router)
app.include_router(admin_router)
app.include_router(support_ticket_router)
app.include_router(password_reset_router)
@app.on_event("startup")
def init_app_state():
    """Startup hook: prime log state, ensure the bootstrap super-admin
    exists, then resume any runs still marked RUNNING."""
    init_log_state()
    bootstrap_super_admin()
    resume_running_runs()

37
app/models.py Normal file
View File

@ -0,0 +1,37 @@
from pydantic import BaseModel, validator
from typing import Literal, Optional
class SipFrequency(BaseModel):
    """How often a SIP executes: *value* units of *unit* (days or minutes)."""
    value: int
    unit: Literal["days", "minutes"]
class StrategyStartRequest(BaseModel):
    """Payload for starting a strategy run; only PAPER mode is accepted."""
    strategy_name: str
    # Optional starting cash; when provided it must be at least 10,000.
    initial_cash: Optional[float] = None
    sip_amount: float
    sip_frequency: SipFrequency
    mode: Literal["PAPER"]
    # NOTE(review): pydantic-v1 style @validator — migrate to
    # @field_validator if the project moves to pydantic v2.
    @validator("initial_cash")
    def validate_cash(cls, v):
        # None is allowed (caller omits the field); otherwise enforce floor.
        if v is None:
            return v
        if v < 10000:
            raise ValueError("Initial cash must be at least 10,000")
        return v
class AuthPayload(BaseModel):
    """Signup/login body; either email or username identifies the account."""
    email: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
class PasswordResetRequest(BaseModel):
    """Request an OTP to be emailed for a password reset."""
    email: str
class PasswordResetConfirm(BaseModel):
    """Complete a password reset with the emailed OTP."""
    email: str
    otp: str
    new_password: str

1
app/routers/__init__.py Normal file
View File

@ -0,0 +1 @@

116
app/routers/auth.py Normal file
View File

@ -0,0 +1,116 @@
import os
from fastapi import APIRouter, HTTPException, Request, Response
from app.models import AuthPayload
from app.services.auth_service import (
SESSION_TTL_SECONDS,
create_session,
create_user,
delete_session,
get_user_for_session,
get_last_session_meta,
verify_user,
)
from app.services.email_service import send_email
router = APIRouter(prefix="/api")
# Name of the HTTP-only cookie that carries the session id.
SESSION_COOKIE_NAME = "session_id"
# COOKIE_SECURE=1 marks the cookie Secure (sent over HTTPS only).
COOKIE_SECURE = os.getenv("COOKIE_SECURE", "0") == "1"
# SameSite policy; validated against lax/strict/none before use.
COOKIE_SAMESITE = (os.getenv("COOKIE_SAMESITE") or "lax").lower()
def _set_session_cookie(response: Response, session_id: str):
    """Attach the session cookie to *response* using the configured policy."""
    if COOKIE_SAMESITE in {"lax", "strict", "none"}:
        same_site = COOKIE_SAMESITE
    else:
        same_site = "lax"
    response.set_cookie(
        SESSION_COOKIE_NAME,
        session_id,
        httponly=True,
        samesite=same_site,
        max_age=SESSION_TTL_SECONDS,
        secure=COOKIE_SECURE,
        path="/",
    )
def _get_identifier(payload: AuthPayload) -> str:
    """Username if present, else email, else "" — whitespace-stripped."""
    return (payload.username or payload.email or "").strip()
@router.post("/signup")
def signup(payload: AuthPayload, response: Response):
    """Create an account, open a session, and best-effort send a welcome mail."""
    identifier = _get_identifier(payload)
    if not (identifier and payload.password):
        raise HTTPException(status_code=400, detail="Email and password are required")
    new_user = create_user(identifier, payload.password)
    if not new_user:
        raise HTTPException(status_code=409, detail="User already exists")
    _set_session_cookie(response, create_session(new_user["id"]))
    # Welcome email is best-effort: signup must not fail on SMTP errors.
    welcome_body = (
        "Welcome to Quantfortune!\n\n"
        "Your account has been created successfully.\n\n"
        "You can now log in and start using the platform.\n\n"
        "Quantfortune Support"
    )
    try:
        send_email(new_user["username"], "Welcome to Quantfortune", welcome_body)
    except Exception:
        pass
    return {"id": new_user["id"], "username": new_user["username"], "role": new_user.get("role")}
@router.post("/login")
def login(payload: AuthPayload, response: Response, request: Request):
    """Authenticate, alert on new device/IP, and open a session."""
    identifier = _get_identifier(payload)
    if not (identifier and payload.password):
        raise HTTPException(status_code=400, detail="Email and password are required")
    user = verify_user(identifier, payload.password)
    if not user:
        raise HTTPException(status_code=401, detail="Invalid email or password")
    client_ip = request.client.host if request.client else None
    user_agent = request.headers.get("user-agent")
    previous = get_last_session_meta(user["id"])
    ip_changed = previous.get("ip") != client_ip
    agent_changed = previous.get("user_agent") != user_agent
    if previous.get("ip") and (ip_changed or agent_changed):
        # Security notification is best-effort; login proceeds regardless.
        try:
            alert_body = (
                "New login detected on your Quantfortune account.\n\n"
                f"IP: {client_ip or 'unknown'}\n"
                f"Device: {user_agent or 'unknown'}\n\n"
                "If this wasn't you, please reset your password immediately."
            )
            send_email(user["username"], "New login detected", alert_body)
        except Exception:
            pass
    _set_session_cookie(response, create_session(user["id"], ip=client_ip, user_agent=user_agent))
    return {"id": user["id"], "username": user["username"], "role": user.get("role")}
@router.post("/logout")
def logout(request: Request, response: Response):
    """Destroy the current session (if any) and clear the cookie."""
    sid = request.cookies.get(SESSION_COOKIE_NAME)
    if sid:
        delete_session(sid)
    response.delete_cookie(SESSION_COOKIE_NAME, path="/")
    return {"ok": True}
@router.get("/me")
def me(request: Request):
    """Return the authenticated user's identity from the session cookie."""
    sid = request.cookies.get(SESSION_COOKIE_NAME)
    user = get_user_for_session(sid) if sid else None
    if not user:
        raise HTTPException(status_code=401, detail="Not authenticated")
    return {"id": user["id"], "username": user["username"], "role": user.get("role")}

205
app/routers/broker.py Normal file
View File

@ -0,0 +1,205 @@
import os
from fastapi import APIRouter, HTTPException, Request
from fastapi.responses import RedirectResponse
from app.broker_store import (
clear_user_broker,
get_broker_credentials,
get_pending_broker,
get_user_broker,
set_broker_auth_state,
set_connected_broker,
set_pending_broker,
)
from app.services.auth_service import get_user_for_session
from app.services.zerodha_service import build_login_url, exchange_request_token
from app.services.email_service import send_email
from app.services.zerodha_storage import set_session
router = APIRouter(prefix="/api/broker")
def _require_user(request: Request):
    """Resolve the session cookie to a user dict or raise 401."""
    sid = request.cookies.get("session_id")
    user = get_user_for_session(sid) if sid else None
    if user is None:
        raise HTTPException(status_code=401, detail="Not authenticated")
    return user
@router.post("/connect")
async def connect_broker(payload: dict, request: Request):
    """Record a manually supplied broker connection for the caller."""
    user = _require_user(request)
    fields = {
        key: (payload.get(key) or "").strip()
        for key in ("broker", "token", "userName", "brokerUserId")
    }
    if not fields["broker"] or not fields["token"]:
        raise HTTPException(status_code=400, detail="Broker and token are required")
    set_connected_broker(
        user["id"],
        fields["broker"],
        fields["token"],
        user_name=fields["userName"] or None,
        broker_user_id=fields["brokerUserId"] or None,
    )
    # Notification is best effort; a mail failure must not fail the connect.
    try:
        send_email(
            user["username"],
            "Broker connected",
            "Your broker has been connected to Quantfortune.\n\n"
            f"Broker: {fields['broker']}\n"
            f"Broker User ID: {fields['brokerUserId'] or 'N/A'}\n",
        )
    except Exception:
        pass
    return {"connected": True}
@router.get("/status")
async def broker_status(request: Request):
    """Report the caller's stored broker-connection state."""
    user = _require_user(request)
    entry = get_user_broker(user["id"]) or {}
    if not entry.get("connected"):
        return {"connected": False}
    return {
        "connected": True,
        "broker": entry.get("broker"),
        "connected_at": entry.get("connected_at"),
        "userName": entry.get("user_name"),
        "brokerUserId": entry.get("broker_user_id"),
        "authState": entry.get("auth_state"),
    }
@router.post("/disconnect")
async def disconnect_broker(request: Request):
    """Drop the caller's broker link and mark auth state as disconnected."""
    user = _require_user(request)
    clear_user_broker(user["id"])
    set_broker_auth_state(user["id"], "DISCONNECTED")
    # Email is informational only; failures are ignored.
    try:
        send_email(
            user["username"],
            "Broker disconnected",
            "Your broker connection has been disconnected from Quantfortune.",
        )
    except Exception:
        pass
    return {"connected": False}
@router.post("/zerodha/login")
async def zerodha_login(payload: dict, request: Request):
    """Stage Zerodha API credentials and hand back the Kite login URL."""
    user = _require_user(request)
    api_key = (payload.get("apiKey") or "").strip()
    api_secret = (payload.get("apiSecret") or "").strip()
    redirect_url = (payload.get("redirectUrl") or "").strip() or None
    if not (api_key and api_secret):
        raise HTTPException(status_code=400, detail="API key and secret are required")
    set_pending_broker(user["id"], "ZERODHA", api_key, api_secret)
    return {"loginUrl": build_login_url(api_key, redirect_url=redirect_url)}
@router.get("/zerodha/callback")
async def zerodha_callback(request: Request, request_token: str = ""):
    """Complete the Zerodha OAuth flow started by /zerodha/login.

    Exchanges the request token (using credentials staged earlier via
    set_pending_broker) for an access token, persists the Kite session,
    and marks the broker connection VALID.
    """
    user = _require_user(request)
    token = request_token.strip()
    if not token:
        raise HTTPException(status_code=400, detail="Missing request_token")
    # Credentials were staged by the preceding /zerodha/login call.
    pending = get_pending_broker(user["id"]) or {}
    api_key = (pending.get("api_key") or "").strip()
    api_secret = (pending.get("api_secret") or "").strip()
    if not api_key or not api_secret:
        raise HTTPException(status_code=400, detail="Zerodha login not initialized")
    try:
        session_data = exchange_request_token(api_key, api_secret, token)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    access_token = session_data.get("access_token")
    if not access_token:
        raise HTTPException(status_code=400, detail="Missing access token from Zerodha")
    # Persist the Kite session details for later holdings/funds calls.
    saved = set_session(
        user["id"],
        {
            "api_key": api_key,
            "access_token": access_token,
            "request_token": session_data.get("request_token", token),
            "user_name": session_data.get("user_name"),
            "broker_user_id": session_data.get("user_id"),
        },
    )
    set_connected_broker(
        user["id"],
        "ZERODHA",
        access_token,
        api_key=api_key,
        api_secret=api_secret,
        user_name=session_data.get("user_name"),
        broker_user_id=session_data.get("user_id"),
        auth_state="VALID",
    )
    return {
        "connected": True,
        "userName": saved.get("user_name"),
        "brokerUserId": saved.get("broker_user_id"),
    }
@router.get("/login")
async def broker_login(request: Request):
    """Redirect the caller to the Zerodha login page using stored credentials."""
    user = _require_user(request)
    creds = get_broker_credentials(user["id"])
    if not creds:
        raise HTTPException(status_code=400, detail="Broker credentials not configured")
    redirect_url = (os.getenv("ZERODHA_REDIRECT_URL") or "").strip()
    if not redirect_url:
        # Fall back to this API's own callback endpoint.
        base = str(request.base_url).rstrip("/")
        redirect_url = f"{base}/api/broker/callback"
    return RedirectResponse(build_login_url(creds["api_key"], redirect_url=redirect_url))
@router.get("/callback")
async def broker_callback(request: Request, request_token: str = ""):
    """Zerodha redirect target for the stored-credentials login flow.

    Exchanges the request token using the user's saved API key/secret,
    persists the resulting session, and redirects to the dashboard.
    """
    user = _require_user(request)
    token = request_token.strip()
    if not token:
        raise HTTPException(status_code=400, detail="Missing request_token")
    creds = get_broker_credentials(user["id"])
    if not creds:
        raise HTTPException(status_code=400, detail="Broker credentials not configured")
    try:
        session_data = exchange_request_token(creds["api_key"], creds["api_secret"], token)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    access_token = session_data.get("access_token")
    if not access_token:
        raise HTTPException(status_code=400, detail="Missing access token from Zerodha")
    # Store the Kite session for subsequent API calls.
    set_session(
        user["id"],
        {
            "api_key": creds["api_key"],
            "access_token": access_token,
            "request_token": session_data.get("request_token", token),
            "user_name": session_data.get("user_name"),
            "broker_user_id": session_data.get("user_id"),
        },
    )
    set_connected_broker(
        user["id"],
        "ZERODHA",
        access_token,
        api_key=creds["api_key"],
        api_secret=creds["api_secret"],
        user_name=session_data.get("user_name"),
        broker_user_id=session_data.get("user_id"),
        auth_state="VALID",
    )
    # Frontend destination; overridable per deployment.
    target_url = os.getenv("BROKER_DASHBOARD_URL") or "/dashboard?armed=false"
    return RedirectResponse(target_url)

12
app/routers/health.py Normal file
View File

@ -0,0 +1,12 @@
from fastapi import APIRouter, HTTPException
from app.services.db import health_check
router = APIRouter()
@router.get("/health")
def health():
    """Liveness/readiness probe: 503 when the database is unreachable."""
    if health_check():
        return {"status": "ok", "db": "ok"}
    raise HTTPException(status_code=503, detail="db_unavailable")

75
app/routers/paper.py Normal file
View File

@ -0,0 +1,75 @@
from fastapi import APIRouter, HTTPException, Request
from app.services.paper_broker_service import (
add_cash,
get_equity_curve,
get_funds,
get_orders,
get_positions,
get_trades,
reset_paper_state,
)
from app.services.tenant import get_request_user_id
router = APIRouter(prefix="/api/paper")
@router.get("/funds")
def funds(request: Request):
    """Return the paper-trading funds snapshot for the requesting tenant."""
    return {"funds": get_funds(get_request_user_id(request))}
@router.get("/positions")
def positions(request: Request):
    """Return the tenant's open paper positions."""
    return {"positions": get_positions(get_request_user_id(request))}
@router.get("/orders")
def orders(request: Request):
    """Return the tenant's paper order history."""
    return {"orders": get_orders(get_request_user_id(request))}
@router.get("/trades")
def trades(request: Request):
    """Return the tenant's executed paper trades."""
    return {"trades": get_trades(get_request_user_id(request))}
@router.get("/equity-curve")
def equity_curve(request: Request):
    """Return the tenant's paper-trading equity curve."""
    return get_equity_curve(get_request_user_id(request))
@router.post("/add-cash")
def add_cash_endpoint(request: Request, payload: dict):
    """Add cash to the caller's paper-trading account.

    Validates that ``amount`` is a positive number, then delegates to the
    paper broker service and returns the refreshed funds snapshot.
    """
    try:
        amount = float(payload.get("amount", 0))
    except (TypeError, ValueError) as exc:
        # Chain the cause so logs show what the client actually sent.
        raise HTTPException(status_code=400, detail="Invalid amount") from exc
    if amount <= 0:
        raise HTTPException(status_code=400, detail="Amount must be positive")
    # Resolve the tenant outside the service try-block so tenant-resolution
    # errors are not misreported as paper-broker validation failures.
    user_id = get_request_user_id(request)
    try:
        add_cash(user_id, amount)
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    return {"funds": get_funds(user_id)}
@router.post("/reset")
def reset_paper(request: Request):
    """Stop any running strategy (best effort) and wipe the tenant's paper state."""
    # Resolve the tenant once, outside the best-effort block: in the original
    # a tenant-resolution failure here was silently swallowed and then
    # re-attempted, masking the real error.
    user_id = get_request_user_id(request)
    try:
        # Imported lazily to avoid a circular import at module load time.
        from app.services.strategy_service import stop_strategy

        stop_strategy(user_id)
    except Exception:
        # Best effort: the reset must succeed even if no strategy is running.
        pass
    reset_paper_state(user_id)
    return {"ok": True, "message": "Paper reset completed"}

View File

@ -0,0 +1,59 @@
from fastapi import APIRouter, HTTPException
from app.models import PasswordResetConfirm, PasswordResetRequest
from app.services.auth_service import (
consume_password_reset_otp,
create_password_reset_otp,
get_user_by_username,
update_user_password,
)
from app.services.email_service import send_email
router = APIRouter(prefix="/api/password-reset")
@router.post("/request")
def request_reset(payload: PasswordResetRequest):
    """Issue a password-reset OTP by email.

    Responds identically for unknown addresses so the endpoint cannot be
    used to enumerate accounts.
    """
    email = payload.email.strip()
    if not email:
        raise HTTPException(status_code=400, detail="Email is required")
    if not get_user_by_username(email):
        return {"ok": True}
    otp = create_password_reset_otp(email)
    body = (
        "Hi,\n\n"
        "We received a request to reset your Quantfortune password.\n\n"
        f"Your OTP code is: {otp}\n"
        "This code is valid for 10 minutes.\n\n"
        "If you did not request this, you can ignore this email.\n\n"
        "Quantfortune Support"
    )
    try:
        delivered = send_email(email, "Quantfortune Password Reset OTP", body)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Email send failed: {exc}") from exc
    if not delivered:
        raise HTTPException(status_code=500, detail="Email send failed: SMTP not configured")
    return {"ok": True}
@router.post("/confirm")
def confirm_reset(payload: PasswordResetConfirm):
    """Verify a reset OTP and set the user's new password."""
    email = payload.email.strip()
    otp = payload.otp.strip()
    new_password = payload.new_password
    if not (email and otp and new_password):
        raise HTTPException(status_code=400, detail="Email, OTP, and new password are required")
    user = get_user_by_username(email)
    if user is None:
        # Same wording for unknown email and bad OTP keeps responses uniform.
        raise HTTPException(status_code=400, detail="Invalid OTP or email")
    if not consume_password_reset_otp(email, otp):
        raise HTTPException(status_code=400, detail="Invalid or expired OTP")
    update_user_password(user["id"], new_password)
    return {"ok": True}

47
app/routers/strategy.py Normal file
View File

@ -0,0 +1,47 @@
from fastapi import APIRouter, Query, Request
from app.models import StrategyStartRequest
from app.services.strategy_service import (
start_strategy,
stop_strategy,
get_strategy_status,
get_engine_status,
get_market_status,
get_strategy_logs as fetch_strategy_logs,
)
from app.services.tenant import get_request_user_id
router = APIRouter(prefix="/api")
@router.post("/strategy/start")
def start(req: StrategyStartRequest, request: Request):
    """Start the trading strategy for the requesting tenant."""
    return start_strategy(req, get_request_user_id(request))
@router.post("/strategy/stop")
def stop(request: Request):
    """Stop the requesting tenant's running strategy."""
    return stop_strategy(get_request_user_id(request))
@router.get("/strategy/status")
def status(request: Request):
    """Return the requesting tenant's strategy status."""
    return get_strategy_status(get_request_user_id(request))
@router.get("/engine/status")
def engine_status(request: Request):
    """Return the requesting tenant's engine status."""
    return get_engine_status(get_request_user_id(request))
@router.get("/market/status")
def market_status():
    """Return the market open/closed status (not tenant-scoped)."""
    return get_market_status()
@router.get("/logs")
def get_logs(request: Request, since_seq: int = Query(0)):
    """Return strategy logs newer than ``since_seq`` for the tenant."""
    return fetch_strategy_logs(get_request_user_id(request), since_seq)
@router.get("/strategy/logs")
def get_strategy_logs_endpoint(request: Request, since_seq: int = Query(0)):
    """Alias route for /logs: strategy logs newer than ``since_seq``."""
    return fetch_strategy_logs(get_request_user_id(request), since_seq)

View File

@ -0,0 +1,39 @@
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from app.services.support_ticket import create_ticket, get_ticket_status
router = APIRouter(prefix="/api/support")
class TicketCreate(BaseModel):
    """Request body for opening a support ticket."""

    name: str  # submitter's display name
    email: str  # contact address; also used for later status lookups
    subject: str
    message: str
class TicketStatusRequest(BaseModel):
    """Request body for checking a ticket's status; email must match the ticket."""

    email: str
@router.post("/ticket")
def submit_ticket(payload: TicketCreate):
    """Create a support ticket from the submitted form fields."""
    subject = payload.subject.strip()
    message = payload.message.strip()
    if not (subject and message):
        raise HTTPException(status_code=400, detail="Subject and message are required")
    return create_ticket(
        name=payload.name.strip(),
        email=payload.email.strip(),
        subject=subject,
        message=message,
    )
@router.post("/ticket/status/{ticket_id}")
def ticket_status(ticket_id: str, payload: TicketStatusRequest):
    """Look up a ticket by id, verifying the requester's email matches."""
    found = get_ticket_status(ticket_id.strip(), payload.email.strip())
    if not found:
        raise HTTPException(status_code=404, detail="Ticket not found")
    return found

41
app/routers/system.py Normal file
View File

@ -0,0 +1,41 @@
from fastapi import APIRouter, HTTPException, Request
from app.services.auth_service import get_user_for_session
from app.services.system_service import arm_system, system_status
from app.services.zerodha_service import KiteApiError
router = APIRouter(prefix="/api/system")
def _require_user(request: Request):
    """Resolve the session cookie to a user dict or raise 401."""
    sid = request.cookies.get("session_id")
    user = get_user_for_session(sid) if sid else None
    if user is None:
        raise HTTPException(status_code=401, detail="Not authenticated")
    return user
@router.post("/arm")
def arm(request: Request):
    """Arm the live trading system for the authenticated user.

    Maps service-level failures onto HTTP: Kite API errors become 502, a
    BROKER_AUTH_REQUIRED result becomes a 401 carrying the broker re-auth
    redirect URL, and any other failure is a generic 400.
    """
    user = _require_user(request)
    try:
        result = arm_system(user["id"], client_ip=request.client.host if request.client else None)
    except KiteApiError as exc:
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    if not result.get("ok"):
        if result.get("code") == "BROKER_AUTH_REQUIRED":
            # The client is expected to follow this URL to re-authenticate.
            raise HTTPException(
                status_code=401,
                detail={"redirect_url": result.get("redirect_url")},
            )
        raise HTTPException(status_code=400, detail="Unable to arm system")
    return result
@router.get("/status")
def status(request: Request):
    """Return the arm/disarm status for the authenticated user."""
    return system_status(_require_user(request)["id"])

234
app/routers/zerodha.py Normal file
View File

@ -0,0 +1,234 @@
from datetime import datetime, timedelta
from fastapi import APIRouter, HTTPException, Query, Request
from fastapi.responses import HTMLResponse
from app.broker_store import clear_user_broker
from app.services.auth_service import get_user_for_session
from app.services.zerodha_service import (
KiteApiError,
KiteTokenError,
build_login_url,
exchange_request_token,
fetch_funds,
fetch_holdings,
)
from app.services.zerodha_storage import (
clear_session,
consume_request_token,
get_session,
set_session,
store_request_token,
)
router = APIRouter(prefix="/api/zerodha")
public_router = APIRouter()
def _require_user(request: Request):
    """Resolve the session cookie to a user dict or raise 401."""
    sid = request.cookies.get("session_id")
    user = get_user_for_session(sid) if sid else None
    if user is None:
        raise HTTPException(status_code=401, detail="Not authenticated")
    return user
def _capture_request_token(request: Request, request_token: str):
    """Validate and stash the Kite request token for the current user."""
    user = _require_user(request)
    cleaned = request_token.strip()
    if not cleaned:
        raise HTTPException(status_code=400, detail="Missing request_token")
    store_request_token(user["id"], cleaned)
def _clear_broker_session(user_id: str):
    """Remove both the broker link and the stored Kite session."""
    for cleanup in (clear_user_broker, clear_session):
        cleanup(user_id)
def _raise_kite_error(user_id: str, exc: KiteApiError):
    """Translate a Kite API failure into an HTTP error (always raises).

    Token errors wipe the stale session and surface as 401; anything else
    is reported as a 502 upstream failure.
    """
    if not isinstance(exc, KiteTokenError):
        raise HTTPException(status_code=502, detail=str(exc)) from exc
    _clear_broker_session(user_id)
    raise HTTPException(
        status_code=401, detail="Zerodha session expired. Please reconnect."
    ) from exc
@router.post("/login-url")
async def login_url(payload: dict, request: Request):
    """Build the Kite login URL for a caller-supplied API key."""
    _require_user(request)
    api_key = (payload.get("apiKey") or "").strip()
    if api_key:
        return {"loginUrl": build_login_url(api_key)}
    raise HTTPException(status_code=400, detail="API key is required")
@router.post("/session")
async def create_session(payload: dict, request: Request):
    """Exchange a Zerodha request token for an access token and persist it.

    Mirrors the /zerodha/callback flow: validates input, exchanges the
    token, and — unlike the original — rejects exchange responses that
    lack an access token instead of silently persisting a null session.
    """
    user = _require_user(request)
    api_key = (payload.get("apiKey") or "").strip()
    api_secret = (payload.get("apiSecret") or "").strip()
    request_token = (payload.get("requestToken") or "").strip()
    if not api_key or not api_secret or not request_token:
        raise HTTPException(
            status_code=400, detail="API key, secret, and request token are required"
        )
    try:
        session_data = exchange_request_token(api_key, api_secret, request_token)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    access_token = session_data.get("access_token")
    if not access_token:
        # Consistent with the callback flows: never store a token-less session.
        raise HTTPException(status_code=400, detail="Missing access token from Zerodha")
    saved = set_session(
        user["id"],
        {
            "api_key": api_key,
            "access_token": access_token,
            "request_token": session_data.get("request_token", request_token),
            "user_name": session_data.get("user_name"),
            "broker_user_id": session_data.get("user_id"),
        },
    )
    return {
        "connected": True,
        "userName": saved.get("user_name"),
        "brokerUserId": saved.get("broker_user_id"),
        "accessToken": saved.get("access_token"),
    }
@router.get("/status")
async def status(request: Request):
    """Report whether the caller has a stored Zerodha session."""
    user = _require_user(request)
    session = get_session(user["id"])
    if session:
        return {
            "connected": True,
            "broker": "zerodha",
            "userName": session.get("user_name"),
            "linkedAt": session.get("linked_at"),
        }
    return {"connected": False}
@router.get("/request-token")
async def request_token(request: Request):
    """Hand out (and consume) the most recently captured request token."""
    user = _require_user(request)
    token = consume_request_token(user["id"])
    if token:
        return {"requestToken": token}
    raise HTTPException(status_code=404, detail="No request token available.")
@router.get("/holdings")
async def holdings(request: Request):
    """Fetch live holdings from Zerodha for the connected session."""
    user = _require_user(request)
    session = get_session(user["id"])
    if not session:
        raise HTTPException(status_code=400, detail="Zerodha is not connected")
    try:
        data = fetch_holdings(session["api_key"], session["access_token"])
    except KiteApiError as exc:
        # Always raises (401 on expired token, 502 otherwise).
        _raise_kite_error(user["id"], exc)
    return {"holdings": data}
@router.get("/funds")
async def funds(request: Request):
    """Fetch live fund margins from Zerodha; flattens the equity segment."""
    user = _require_user(request)
    session = get_session(user["id"])
    if not session:
        raise HTTPException(status_code=400, detail="Zerodha is not connected")
    try:
        data = fetch_funds(session["api_key"], session["access_token"])
    except KiteApiError as exc:
        # Always raises (401 on expired token, 502 otherwise).
        _raise_kite_error(user["id"], exc)
    equity = data.get("equity", {}) if isinstance(data, dict) else {}
    return {"funds": {**equity, "raw": data}}
@router.get("/equity-curve")
async def equity_curve(request: Request, from_: str = Query("", alias="from")):
    """Return a synthetic equity curve ending at the account's current value.

    The series is fabricated client-side: a straight-line trend from 85%
    of today's value (or a 10000 floor) up to the live holdings+cash
    total. Only the final value is real data from Zerodha.
    """
    user = _require_user(request)
    session = get_session(user["id"])
    if not session:
        raise HTTPException(status_code=400, detail="Zerodha is not connected")
    try:
        holdings = fetch_holdings(session["api_key"], session["access_token"])
        funds_data = fetch_funds(session["api_key"], session["access_token"])
    except KiteApiError as exc:
        # Always raises (401 on expired token, 502 otherwise).
        _raise_kite_error(user["id"], exc)
    equity = funds_data.get("equity", {}) if isinstance(funds_data, dict) else {}
    # Mark-to-market the holdings; fall back to average price when no last price.
    total_holdings_value = 0
    for item in holdings:
        qty = float(item.get("quantity") or item.get("qty") or 0)
        last = float(item.get("last_price") or item.get("average_price") or 0)
        total_holdings_value += qty * last
    total_funds = float(equity.get("cash") or 0)
    current_value = max(0, total_holdings_value + total_funds)
    ms_in_day = 86400000
    now = datetime.utcnow()
    default_start = now - timedelta(days=90)
    # Optional ?from=ISO-date; fall back to a 90-day window on bad input.
    if from_:
        try:
            start_date = datetime.fromisoformat(from_)
        except ValueError:
            start_date = default_start
    else:
        start_date = default_start
    if start_date > now:
        start_date = now
    # At least 2 points so the progress division below never divides by zero.
    span_days = max(
        2,
        int(((now - start_date).total_seconds() * 1000) // ms_in_day),
    )
    start_value = current_value * 0.85 if current_value > 0 else 10000
    points = []
    for i in range(span_days):
        day = start_date + timedelta(days=i)
        progress = i / (span_days - 1)
        trend = start_value + (current_value - start_value) * progress
        value = max(0, round(trend))
        points.append({"date": day.isoformat(), "value": value})
    return {
        "startDate": start_date.isoformat(),
        "endDate": now.isoformat(),
        "accountOpenDate": session.get("linked_at"),
        "points": points,
    }
@router.get("/callback")
async def callback(request: Request, request_token: str = ""):
    """Zerodha redirect target that records the request token (JSON reply)."""
    _capture_request_token(request, request_token)
    return {"status": "ok", "message": "Request token captured. You can close this tab."}
@router.get("/login")
async def login_redirect(request: Request, request_token: str = ""):
    """Delegate to the /callback handler (same capture behavior)."""
    return await callback(request, request_token=request_token)
@public_router.get("/login", response_class=HTMLResponse)
async def login_capture(request: Request, request_token: str = ""):
    """HTML variant of the callback: record the token and show a close-tab page.

    Requires an authenticated app session (the capture helper raises 401
    otherwise), despite being mounted on the public router.
    """
    _capture_request_token(request, request_token)
    return (
        "<html><body style=\"font-family:sans-serif;padding:24px;\">"
        "<h3>Request token captured</h3>"
        "<p>You can close this tab and return to QuantFortune.</p>"
        "</body></html>"
    )

0
app/services/__init__.py Normal file
View File

View File

@ -0,0 +1,280 @@
import hashlib
import hmac
import os
import secrets
from datetime import datetime, timedelta, timezone
from uuid import uuid4

from app.services.db import db_connection
# Session lifetime (default 7 days) and the window before expiry within which
# an active session gets its expiry extended.
SESSION_TTL_SECONDS = int(os.getenv("SESSION_TTL_SECONDS", str(60 * 60 * 24 * 7)))
SESSION_REFRESH_WINDOW_SECONDS = int(
    os.getenv("SESSION_REFRESH_WINDOW_SECONDS", str(60 * 60))
)
# Password-reset OTP validity and the secret mixed into the stored OTP hash.
# NOTE(review): "otp_secret" is a weak fallback — set RESET_OTP_SECRET in production.
RESET_OTP_TTL_MINUTES = int(os.getenv("RESET_OTP_TTL_MINUTES", "10"))
RESET_OTP_SECRET = os.getenv("RESET_OTP_SECRET", "otp_secret")
def _now_utc() -> datetime:
return datetime.now(timezone.utc)
def _new_expiry(now: datetime) -> datetime:
    """Expiry timestamp one full session TTL after ``now``."""
    return now + timedelta(seconds=SESSION_TTL_SECONDS)
def _hash_password(password: str) -> str:
    """Hex SHA-256 digest of the password.

    NOTE(review): unsalted, fast SHA-256 is weak for password storage
    (rainbow tables / GPU brute force). Migrating to bcrypt/argon2 would
    change the stored hash format, so it needs a migration plan rather
    than an in-place swap here.
    """
    return hashlib.sha256(password.encode("utf-8")).hexdigest()
def _hash_otp(email: str, otp: str) -> str:
    """Keyed hash binding an OTP to an email address for storage."""
    material = f"{email}:{otp}:{RESET_OTP_SECRET}".encode("utf-8")
    return hashlib.sha256(material).hexdigest()
def _row_to_user(row):
if not row:
return None
return {
"id": row[0],
"username": row[1],
"password": row[2],
"role": row[3] if len(row) > 3 else None,
}
def get_user_by_username(username: str):
    """Load a user row by username; returns None when absent."""
    query = "SELECT id, username, password_hash, role FROM app_user WHERE username = %s"
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(query, (username,))
            return _row_to_user(cur.fetchone())
def get_user_by_id(user_id: str):
    """Load a user row by primary key; returns None when absent."""
    query = "SELECT id, username, password_hash, role FROM app_user WHERE id = %s"
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(query, (user_id,))
            return _row_to_user(cur.fetchone())
def create_user(username: str, password: str):
    """Insert a new USER-role account and return it as a dict.

    Uses ON CONFLICT DO NOTHING, so a duplicate username inserts nothing
    and RETURNING yields no row — this function then returns None. The
    caller must treat None as "username already taken".
    """
    user_id = str(uuid4())
    password_hash = _hash_password(password)
    with db_connection() as conn:
        # Inner `with conn`: transaction scope (commit on success, rollback on error).
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO app_user (id, username, password_hash, role)
                    VALUES (%s, %s, %s, 'USER')
                    ON CONFLICT (username) DO NOTHING
                    RETURNING id, username, password_hash, role
                    """,
                    (user_id, username, password_hash),
                )
                return _row_to_user(cur.fetchone())
def authenticate_user(username: str, password: str):
    """Return the user dict when username/password match, else None.

    Uses a constant-time digest comparison so the check does not leak
    hash prefix-match timing to an attacker.
    """
    user = get_user_by_username(username)
    if not user:
        return None
    stored_hash = user.get("password") or ""
    if not hmac.compare_digest(stored_hash, _hash_password(password)):
        return None
    return user
def verify_user(username: str, password: str):
    """Alias for :func:`authenticate_user` kept for existing callers."""
    return authenticate_user(username, password)
def create_session(user_id: str, ip: str | None = None, user_agent: str | None = None) -> str:
    """Create a DB-backed session row and return its id."""
    session_id = str(uuid4())
    created = _now_utc()
    row = (session_id, user_id, created, created, _new_expiry(created), ip, user_agent)
    sql = """
        INSERT INTO app_session (id, user_id, created_at, last_seen_at, expires_at, ip, user_agent)
        VALUES (%s, %s, %s, %s, %s, %s, %s)
    """
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(sql, row)
    return session_id
def get_last_session_meta(user_id: str):
    """IP and user agent of the user's most recent session (Nones when new)."""
    sql = """
        SELECT ip, user_agent
        FROM app_session
        WHERE user_id = %s
        ORDER BY created_at DESC
        LIMIT 1
    """
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(sql, (user_id,))
            row = cur.fetchone()
    if row:
        return {"ip": row[0], "user_agent": row[1]}
    return {"ip": None, "user_agent": None}
def update_user_password(user_id: str, new_password: str):
    """Overwrite the stored password hash for ``user_id``."""
    hashed = _hash_password(new_password)
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    "UPDATE app_user SET password_hash = %s WHERE id = %s",
                    (hashed, user_id),
                )
def create_password_reset_otp(email: str):
    """Generate, persist, and return a numeric password-reset OTP.

    The code length defaults to the original 4 digits but can be raised
    via RESET_OTP_DIGITS (a 4-digit code has only 10,000 possibilities,
    so 6+ is recommended). Only a keyed hash of the OTP is stored.
    """
    digits = max(4, int(os.getenv("RESET_OTP_DIGITS", "4")))
    otp = f"{secrets.randbelow(10 ** digits):0{digits}d}"
    now = _now_utc()
    expires_at = now + timedelta(minutes=RESET_OTP_TTL_MINUTES)
    otp_hash = _hash_otp(email, otp)
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO password_reset_otp (id, email, otp_hash, created_at, expires_at, used_at)
                    VALUES (%s, %s, %s, %s, %s, NULL)
                    """,
                    (str(uuid4()), email, otp_hash, now, expires_at),
                )
    return otp
def consume_password_reset_otp(email: str, otp: str) -> bool:
    """Atomically validate and burn a reset OTP; True when it was valid.

    Matches the newest unused, unexpired OTP for the email/hash pair and
    marks it used in the same transaction so a code works at most once.
    """
    now = _now_utc()
    otp_hash = _hash_otp(email, otp)
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id
                    FROM password_reset_otp
                    WHERE email = %s
                    AND otp_hash = %s
                    AND used_at IS NULL
                    AND expires_at > %s
                    ORDER BY created_at DESC
                    LIMIT 1
                    """,
                    (email, otp_hash, now),
                )
                row = cur.fetchone()
                if not row:
                    return False
                # Single-use: stamp used_at before reporting success.
                cur.execute(
                    "UPDATE password_reset_otp SET used_at = %s WHERE id = %s",
                    (now, row[0]),
                )
                return True
def get_session(session_id: str):
    """Fetch a session row as a dict with ISO-formatted timestamps."""
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, user_id, created_at, last_seen_at, expires_at
                FROM app_session
                WHERE id = %s
                """,
                (session_id,),
            )
            row = cur.fetchone()
    if not row:
        return None

    def _iso(value):
        return value.isoformat() if value else None

    return {
        "id": row[0],
        "user_id": row[1],
        "created_at": _iso(row[2]),
        "last_seen_at": _iso(row[3]),
        "expires_at": _iso(row[4]),
    }
def delete_session(session_id: str):
    """Remove a session row; a missing id is a silent no-op."""
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute("DELETE FROM app_session WHERE id = %s", (session_id,))
def get_user_for_session(session_id: str):
    """Resolve a session id to its user, enforcing and refreshing expiry.

    Side effects per call: garbage-collects all expired sessions, backfills
    a missing expires_at (legacy rows), deletes this session if expired,
    and extends a session that is inside the refresh window (sliding
    expiration). Returns the user dict or None when unauthenticated.
    """
    if not session_id:
        return None
    now = _now_utc()
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                # Opportunistic cleanup of every expired session, not just this one.
                cur.execute(
                    """
                    DELETE FROM app_session
                    WHERE expires_at IS NOT NULL AND expires_at <= %s
                    """,
                    (now,),
                )
                cur.execute(
                    """
                    SELECT id, user_id, created_at, last_seen_at, expires_at
                    FROM app_session
                    WHERE id = %s
                    """,
                    (session_id,),
                )
                row = cur.fetchone()
                if not row:
                    return None
                expires_at = row[4]
                if expires_at is None:
                    # Legacy row without an expiry: assign a fresh full TTL.
                    new_expiry = _new_expiry(now)
                    cur.execute(
                        """
                        UPDATE app_session
                        SET expires_at = %s, last_seen_at = %s
                        WHERE id = %s
                        """,
                        (new_expiry, now, session_id),
                    )
                    expires_at = new_expiry
                if expires_at <= now:
                    cur.execute("DELETE FROM app_session WHERE id = %s", (session_id,))
                    return None
                # Sliding expiration: extend only when close to expiry to avoid
                # rewriting the row on every request.
                if (expires_at - now).total_seconds() <= SESSION_REFRESH_WINDOW_SECONDS:
                    new_expiry = _new_expiry(now)
                    cur.execute(
                        """
                        UPDATE app_session
                        SET expires_at = %s, last_seen_at = %s
                        WHERE id = %s
                        """,
                        (new_expiry, now, session_id),
                    )
                cur.execute(
                    "SELECT id, username, password_hash, role FROM app_user WHERE id = %s",
                    (row[1],),
                )
                return _row_to_user(cur.fetchone())

View File

View File

@ -0,0 +1,39 @@
import os
from cryptography.fernet import Fernet, InvalidToken
ENCRYPTION_PREFIX = "enc:"
KEY_ENV_VAR = "BROKER_TOKEN_KEY"
def _get_fernet() -> Fernet:
    """Build a Fernet cipher from the key in the BROKER_TOKEN_KEY env var."""
    raw_key = (os.getenv(KEY_ENV_VAR) or "").strip()
    if not raw_key:
        raise RuntimeError(f"{KEY_ENV_VAR} is not set")
    try:
        cipher = Fernet(raw_key.encode("utf-8"))
    except Exception as exc:
        raise RuntimeError(
            f"{KEY_ENV_VAR} must be a urlsafe base64-encoded 32-byte key"
        ) from exc
    return cipher
def encrypt_value(value: str | None) -> str | None:
    """Encrypt a secret, prefixing it so stored values are self-describing.

    Empty/None values and already-encrypted values pass through unchanged.
    """
    if not value or value.startswith(ENCRYPTION_PREFIX):
        return value
    ciphertext = _get_fernet().encrypt(value.encode("utf-8")).decode("utf-8")
    return ENCRYPTION_PREFIX + ciphertext
def decrypt_value(value: str | None) -> str | None:
    """Reverse :func:`encrypt_value`; unprefixed (plaintext) values pass through."""
    if not value or not value.startswith(ENCRYPTION_PREFIX):
        return value
    ciphertext = value[len(ENCRYPTION_PREFIX) :]
    try:
        return _get_fernet().decrypt(ciphertext.encode("utf-8")).decode("utf-8")
    except InvalidToken as exc:
        raise RuntimeError("Unable to decrypt token; invalid BROKER_TOKEN_KEY") from exc

210
app/services/db.py Normal file
View File

@ -0,0 +1,210 @@
import os
import threading
import time
from contextlib import contextmanager
from typing import Generator
from sqlalchemy import create_engine, schema, text
from sqlalchemy.engine import Engine, URL
from sqlalchemy.exc import InterfaceError as SAInterfaceError
from sqlalchemy.exc import OperationalError as SAOperationalError
from sqlalchemy.orm import declarative_base, sessionmaker
from psycopg2 import OperationalError as PGOperationalError
from psycopg2 import InterfaceError as PGInterfaceError
Base = declarative_base()
_ENGINE: Engine | None = None
_ENGINE_LOCK = threading.Lock()
class _ConnectionProxy:
    """Thin wrapper over a raw DBAPI connection.

    Delegates every attribute to the underlying connection and adds
    context-manager semantics: commit on clean exit, rollback on error.
    """
    def __init__(self, conn):
        self._conn = conn
    def __getattr__(self, name):
        # Fall through to the wrapped connection for cursor(), commit(), etc.
        return getattr(self._conn, name)
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc, tb):
        if exc_type is None:
            try:
                self._conn.commit()
            except Exception:
                # Commit failed: undo what we can, then surface the commit error.
                self._conn.rollback()
                raise
        else:
            try:
                self._conn.rollback()
            except Exception:
                # A failing rollback is swallowed; the body's exception matters more.
                pass
        # Never suppress the caller's exception.
        return False
def _db_config() -> dict[str, str | int]:
url = os.getenv("DATABASE_URL")
if url:
return {"url": url}
return {
"host": os.getenv("DB_HOST") or os.getenv("PGHOST") or "localhost",
"port": int(os.getenv("DB_PORT") or os.getenv("PGPORT") or "5432"),
"dbname": os.getenv("DB_NAME") or os.getenv("PGDATABASE") or "trading_db",
"user": os.getenv("DB_USER") or os.getenv("PGUSER") or "trader",
"password": os.getenv("DB_PASSWORD") or os.getenv("PGPASSWORD") or "traderpass",
"connect_timeout": int(os.getenv("DB_CONNECT_TIMEOUT", "5")),
"schema": os.getenv("DB_SCHEMA") or os.getenv("PGSCHEMA") or "quant_app",
}
def get_database_url(cfg: dict[str, str | int] | None = None) -> str:
    """Render a SQLAlchemy psycopg2 URL from a config dict.

    An explicit "url" entry is returned verbatim; otherwise host/port/
    credentials are assembled, with search_path pinned to the configured
    schema via libpq options.
    """
    cfg = cfg or _db_config()
    if "url" in cfg:
        return str(cfg["url"])
    query: dict[str, str] = {"connect_timeout": str(cfg["connect_timeout"])}
    schema_name = cfg.get("schema")
    if schema_name:
        query["options"] = f"-csearch_path={schema_name},public"
    return URL.create(
        "postgresql+psycopg2",
        username=str(cfg["user"]),
        password=str(cfg["password"]),
        host=str(cfg["host"]),
        port=int(cfg["port"]),
        database=str(cfg["dbname"]),
        query=query,
    ).render_as_string(hide_password=False)
def _create_engine() -> Engine:
    """Build the pooled SQLAlchemy engine and ensure the target schema exists."""
    cfg = _db_config()
    # Pool sizing from env; DB_POOL_MIN is accepted as a legacy alias.
    pool_size = int(os.getenv("DB_POOL_SIZE", os.getenv("DB_POOL_MIN", "5")))
    max_overflow = int(os.getenv("DB_POOL_MAX", "10"))
    pool_timeout = int(os.getenv("DB_POOL_TIMEOUT", "30"))
    engine = create_engine(
        get_database_url(cfg),
        pool_size=pool_size,
        max_overflow=max_overflow,
        pool_timeout=pool_timeout,
        # Validate pooled connections before use so stale ones are replaced.
        pool_pre_ping=True,
        future=True,
    )
    schema_name = cfg.get("schema")
    if schema_name:
        try:
            with engine.begin() as conn:
                conn.execute(schema.CreateSchema(schema_name, if_not_exists=True))
        except Exception:
            # Schema creation is best-effort; permissions might be limited in some environments.
            pass
    return engine
def get_engine() -> Engine:
    """Return the process-wide engine, creating it lazily on first use.

    Double-checked locking: the unlocked check keeps the hot path cheap,
    while the re-check under _ENGINE_LOCK prevents two threads from each
    building an engine.
    """
    global _ENGINE
    if _ENGINE is None:
        with _ENGINE_LOCK:
            if _ENGINE is None:
                _ENGINE = _create_engine()
    return _ENGINE
# Module-level ORM session factory.
# NOTE(review): bind=get_engine() forces engine creation at import time, so
# importing this module connects (and best-effort creates the schema) as a
# side effect — confirm this is intended for test/CLI contexts.
SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    expire_on_commit=False,
    bind=get_engine(),
)
def _get_connection():
    """Borrow a raw DBAPI connection from the SQLAlchemy pool."""
    return get_engine().raw_connection()
def _put_connection(conn, close=False):
    """Return a pooled connection via close().

    NOTE(review): the ``close`` flag only controls error handling — when
    True a failing close() is swallowed, when False it propagates. The
    connection is closed (returned to the pool) either way; confirm the
    flag name reflects the intended semantics.
    """
    try:
        conn.close()
    except Exception:
        if not close:
            raise
@contextmanager
def db_connection(retries: int | None = None, delay: float | None = None):
    """Yield a transactional connection proxy, retrying transient DB failures.

    Retries operational/interface errors with exponential backoff
    (DB_RETRY_COUNT / DB_RETRY_DELAY env overrides).

    NOTE(review): the retry loop wraps the ``yield``, so a DB error raised
    from the caller's ``with`` body is caught here and the generator loops
    to yield a second time — @contextmanager forbids that and raises
    RuntimeError. In practice the retry only safely covers connection
    acquisition; consider narrowing the try to _get_connection().
    """
    attempts = retries if retries is not None else int(os.getenv("DB_RETRY_COUNT", "3"))
    backoff = delay if delay is not None else float(os.getenv("DB_RETRY_DELAY", "0.2"))
    last_error = None
    for attempt in range(attempts):
        conn = None
        try:
            conn = _get_connection()
            conn.autocommit = False
            yield _ConnectionProxy(conn)
            return
        except (SAOperationalError, SAInterfaceError, PGOperationalError, PGInterfaceError) as exc:
            last_error = exc
            if conn is not None:
                _put_connection(conn)
                conn = None
            # Exponential backoff before the next attempt.
            time.sleep(backoff * (2 ** attempt))
            continue
        finally:
            if conn is not None:
                _put_connection(conn, close=conn.closed != 0)
    if last_error:
        raise last_error
def run_with_retry(operation, retries: int | None = None, delay: float | None = None):
    """Run ``operation(cur, conn)`` in a transaction, retrying transient errors.

    Each attempt gets a fresh connection (db_connection with retries=1).
    Operational/interface errors roll back and retry with exponential
    backoff; any other exception rolls back and propagates immediately.
    Returns whatever ``operation`` returns on the first successful attempt.
    """
    attempts = retries if retries is not None else int(os.getenv("DB_RETRY_COUNT", "3"))
    backoff = delay if delay is not None else float(os.getenv("DB_RETRY_DELAY", "0.2"))
    last_error = None
    for attempt in range(attempts):
        with db_connection(retries=1) as conn:
            try:
                with conn.cursor() as cur:
                    result = operation(cur, conn)
                conn.commit()
                return result
            except (SAOperationalError, SAInterfaceError, PGOperationalError, PGInterfaceError) as exc:
                conn.rollback()
                last_error = exc
                time.sleep(backoff * (2 ** attempt))
                continue
            except Exception:
                conn.rollback()
                raise
    if last_error:
        raise last_error
@contextmanager
def db_transaction():
    """Yield a cursor inside a commit-on-success / rollback-on-error scope."""
    with db_connection() as conn:
        try:
            with conn.cursor() as cur:
                yield cur
            conn.commit()
        except Exception:
            conn.rollback()
            raise
def get_db() -> Generator:
    """Dependency-style generator: yield an ORM session, always close it."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
def health_check() -> bool:
    """Probe the database with ``SELECT 1``; True when reachable."""
    try:
        with get_engine().connect() as connection:
            connection.execute(text("SELECT 1"))
    except Exception:
        return False
    return True

View File

@ -0,0 +1,28 @@
import os
import smtplib
import ssl
from email.message import EmailMessage
def send_email(to_email: str, subject: str, body_text: str) -> bool:
    """Send a plain-text email via SMTP with STARTTLS.

    Server settings and credentials come from the SMTP_* environment
    variables.  Returns False (without sending) when user/password are not
    configured, True after a successful send; SMTP errors propagate.
    """
    user = (os.getenv("SMTP_USER") or "").strip()
    # App passwords are often pasted with spaces (e.g. Gmail); strip them.
    password = (os.getenv("SMTP_PASS") or "").replace(" ", "").strip()
    host = (os.getenv("SMTP_HOST") or "smtp.gmail.com").strip()
    port = int((os.getenv("SMTP_PORT") or "587").strip())
    sender_name = (os.getenv("SMTP_FROM_NAME") or "Quantfortune Support").strip()
    if not user or not password:
        return False
    message = EmailMessage()
    message["From"] = f"{sender_name} <{user}>"
    message["To"] = to_email
    message["Subject"] = subject
    message.set_content(body_text)
    tls_context = ssl.create_default_context()
    with smtplib.SMTP(host, port) as server:
        server.starttls(context=tls_context)
        server.login(user, password)
        server.send_message(message)
    return True

View File

@ -0,0 +1,191 @@
import os
import sys
from pathlib import Path
PROJECT_ROOT = Path(__file__).resolve().parents[3]
if str(PROJECT_ROOT) not in sys.path:
sys.path.append(str(PROJECT_ROOT))
from indian_paper_trading_strategy.engine.broker import PaperBroker
from indian_paper_trading_strategy.engine.state import load_state, save_state
from indian_paper_trading_strategy.engine.db import engine_context, insert_engine_event
from app.services.db import run_with_retry
from app.services.run_service import get_active_run_id, get_running_run_id
# One-shot flag: the PaperBroker store path is printed only once per process.
_logged_path = False
def _broker():
    """Build a PaperBroker seeded from the persisted PAPER state.

    Must run inside an engine_context so load_state reads the correct
    user/run.  On first use per process, prints the broker's store path
    for debugging.
    """
    global _logged_path
    state = load_state(mode="PAPER")
    initial_cash = float(state.get("initial_cash", 0))
    broker = PaperBroker(initial_cash=initial_cash)
    if not _logged_path:
        _logged_path = True
        # Diagnostic: emitted once so operators can locate the store on disk.
        print(
            "PaperBroker store path:",
            {
                "cwd": os.getcwd(),
                "paper_store_path": str(broker.store_path) if hasattr(broker, "store_path") else "NO_STORE_PATH",
                "abs_store_path": os.path.abspath(str(broker.store_path)) if hasattr(broker, "store_path") else "N/A",
            },
        )
    return broker
def get_paper_broker(user_id: str):
    """Return a PaperBroker bound to the user's active run."""
    active_run = get_active_run_id(user_id)
    with engine_context(user_id, active_run):
        return _broker()
def get_funds(user_id: str):
    """Funds summary for the user's active run."""
    active_run = get_active_run_id(user_id)
    with engine_context(user_id, active_run):
        broker = _broker()
        return broker.get_funds()
def get_positions(user_id: str):
    """Positions for the user's active run, each annotated with pnl/pnl_pct."""
    run_id = get_active_run_id(user_id)
    with engine_context(user_id, run_id):
        raw_positions = _broker().get_positions()

    def _annotate(position):
        quantity = float(position.get("qty", 0))
        avg_price = float(position.get("avg_price", 0))
        last_price = float(position.get("last_price", 0))
        profit = (last_price - avg_price) * quantity
        # Guard the percentage against a zero/missing average price.
        profit_pct = ((last_price - avg_price) / avg_price * 100) if avg_price else 0.0
        return {**position, "pnl": profit, "pnl_pct": profit_pct}

    return [_annotate(item) for item in raw_positions]
def get_orders(user_id: str):
    """All orders recorded for the user's active run."""
    active_run = get_active_run_id(user_id)
    with engine_context(user_id, active_run):
        return _broker().get_orders()
def get_trades(user_id: str):
    """All trades recorded for the user's active run."""
    active_run = get_active_run_id(user_id)
    with engine_context(user_id, active_run):
        return _broker().get_trades()
def get_equity_curve(user_id: str):
    """Equity curve for the user's active run as [{timestamp, equity, pnl}].

    Points without a stored pnl fall back to ``equity - initial_cash``.

    BUG FIX: ``load_state(mode="PAPER")`` was previously called *after* the
    engine_context block exited, so the state (and thus initial_cash) was
    read without the user/run context that every sibling helper relies on.
    The state load now happens inside the context.
    """
    run_id = get_active_run_id(user_id)
    with engine_context(user_id, run_id):
        broker = _broker()
        points = broker.get_equity_curve()
        if not points:
            return []
        state = load_state(mode="PAPER")
    initial_cash = float(state.get("initial_cash", 0))
    response = []
    for point in points:
        equity = float(point.get("equity", 0))
        pnl = point.get("pnl")
        if pnl is None:
            # Derive pnl for legacy points that predate pnl persistence.
            pnl = equity - initial_cash
        response.append(
            {
                "timestamp": point.get("timestamp"),
                "equity": equity,
                "pnl": float(pnl),
            }
        )
    return response
def add_cash(user_id: str, amount: float):
    """Atomically add cash to the running paper account.

    Requires a RUNNING run (raises ValueError otherwise, or for a
    non-positive amount).  Runs inside run_with_retry so the whole
    state+store mutation commits or rolls back as one transaction;
    ``for_update=True`` row-locks the state/store rows for the duration.
    Returns the updated state dict.
    """
    if amount <= 0:
        raise ValueError("Amount must be positive")
    run_id = get_running_run_id(user_id)
    if not run_id:
        raise ValueError("Strategy must be running to add cash")
    def _op(cur, _conn):
        with engine_context(user_id, run_id):
            state = load_state(mode="PAPER", cur=cur, for_update=True)
            initial_cash = float(state.get("initial_cash", 0))
            broker = PaperBroker(initial_cash=initial_cash)
            # NOTE(review): uses the broker's private store accessors so the
            # update shares this transaction's cursor/lock.
            store = broker._load_store(cur=cur, for_update=True)
            cash = float(store.get("cash", 0)) + amount
            store["cash"] = cash
            broker._save_store(store, cur=cur)
            state["cash"] = cash
            state["initial_cash"] = initial_cash + amount
            state["total_invested"] = float(state.get("total_invested", 0)) + amount
            save_state(
                state,
                mode="PAPER",
                cur=cur,
                emit_event=True,
                event_meta={"source": "add_cash"},
            )
            insert_engine_event(
                cur,
                "CASH_ADDED",
                data={"amount": amount, "cash": cash},
            )
            return state
    return run_with_retry(_op)
def reset_paper_state(user_id: str):
    """Delete all persisted paper-trading data for the user's active run.

    Clears every per-run table (same order as before: logs/events first,
    engine state last) and records a PAPER_RESET engine event, all inside
    one retried transaction.
    """
    run_id = get_active_run_id(user_id)

    # Table names are constants, so the f-string below is not an injection risk.
    tables = (
        "strategy_log",
        "engine_event",
        "paper_equity_curve",
        "paper_trade",
        "paper_order",
        "paper_position",
        "paper_broker_account",
        "mtm_ledger",
        "event_ledger",
        "engine_state_paper",
    )

    def _op(cur, _conn):
        with engine_context(user_id, run_id):
            for table in tables:
                cur.execute(
                    f"DELETE FROM {table} WHERE user_id = %s AND run_id = %s",
                    (user_id, run_id),
                )
            insert_engine_event(cur, "PAPER_RESET", data={})

    run_with_retry(_op)

View File

@ -0,0 +1,22 @@
class RunLifecycleError(Exception):
    """Raised when a strategy run cannot legally make the requested transition."""
    pass
class RunLifecycleManager:
    """Guards the "arm" transition of a strategy run's lifecycle."""

    # Statuses a run may legally be armed from.
    ARMABLE = {"STOPPED", "PAUSED_AUTH_EXPIRED"}

    @classmethod
    def assert_can_arm(cls, status: str):
        """Validate that *status* permits arming; return the normalized status.

        Raises RunLifecycleError for RUNNING, ERROR, or any status outside
        ARMABLE.  Normalization is trim + uppercase; None counts as "".
        """
        normalized = (status or "").strip().upper()
        if normalized == "RUNNING":
            raise RunLifecycleError("Run already RUNNING")
        elif normalized == "ERROR":
            raise RunLifecycleError("Run in ERROR must be reset before arming")
        elif normalized in cls.ARMABLE:
            return normalized
        raise RunLifecycleError(f"Run cannot be armed from status {normalized}")

    @classmethod
    def is_armable(cls, status: str) -> bool:
        """True when *status* (case/whitespace-insensitive) allows arming."""
        return (status or "").strip().upper() in cls.ARMABLE

176
app/services/run_service.py Normal file
View File

@ -0,0 +1,176 @@
import threading
from datetime import datetime, timezone
from uuid import uuid4
from psycopg2.extras import Json
from app.services.db import run_with_retry
# Process-wide cache of the fallback user id; writes are guarded by _DEFAULT_LOCK.
_DEFAULT_USER_ID = None
_DEFAULT_LOCK = threading.Lock()
def _utc_now():
return datetime.now(timezone.utc)
def get_default_user_id():
    """Return a cached fallback user id (first app_user by username), or None.

    The first successful lookup is memoized for the process lifetime.
    """
    global _DEFAULT_USER_ID
    cached = _DEFAULT_USER_ID
    if cached:
        return cached

    def _first_user(cur, _conn):
        cur.execute("SELECT id FROM app_user ORDER BY username LIMIT 1")
        row = cur.fetchone()
        return row[0] if row else None

    user_id = run_with_retry(_first_user)
    if user_id:
        with _DEFAULT_LOCK:
            _DEFAULT_USER_ID = user_id
    return user_id
def _default_run_id(user_id: str) -> str:
return f"default_{user_id}"
def ensure_default_run(user_id: str):
    """Insert the user's default STOPPED strategy_run if absent; return its id.

    Idempotent via ON CONFLICT DO NOTHING on run_id.
    """
    run_id = _default_run_id(user_id)

    def _insert(cur, _conn):
        created = _utc_now()
        row = (
            run_id,
            user_id,
            created,
            None,
            None,
            "STOPPED",
            None,
            None,
            None,
            Json({}),
        )
        cur.execute(
            """
            INSERT INTO strategy_run (
                run_id, user_id, created_at, started_at, stopped_at, status, strategy, mode, broker, meta
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            ON CONFLICT (run_id) DO NOTHING
            """,
            row,
        )
        return run_id

    return run_with_retry(_insert)
def get_active_run_id(user_id: str):
    """Most relevant run id for the user.

    Preference order: latest RUNNING run, then latest run of any status,
    then a freshly ensured default run.
    """

    def _lookup(cur, _conn):
        # Try the RUNNING-only query first, then fall back to any run.
        for query in (
            """
            SELECT run_id
            FROM strategy_run
            WHERE user_id = %s AND status = 'RUNNING'
            ORDER BY created_at DESC
            LIMIT 1
            """,
            """
            SELECT run_id
            FROM strategy_run
            WHERE user_id = %s
            ORDER BY created_at DESC
            LIMIT 1
            """,
        ):
            cur.execute(query, (user_id,))
            row = cur.fetchone()
            if row:
                return row[0]
        return None

    found = run_with_retry(_lookup)
    if found:
        return found
    return ensure_default_run(user_id)
def get_running_run_id(user_id: str):
    """run_id of the user's most recent RUNNING run, or None."""

    def _query(cur, _conn):
        cur.execute(
            """
            SELECT run_id
            FROM strategy_run
            WHERE user_id = %s AND status = 'RUNNING'
            ORDER BY created_at DESC
            LIMIT 1
            """,
            (user_id,),
        )
        row = cur.fetchone()
        if row is None:
            return None
        return row[0]

    return run_with_retry(_query)
def create_strategy_run(user_id: str, strategy: str | None, mode: str | None, broker: str | None, meta: dict | None):
    """Insert a new RUNNING strategy_run and return its generated run id."""
    new_run_id = str(uuid4())

    def _insert(cur, _conn):
        started = _utc_now()
        values = (
            new_run_id,
            user_id,
            started,
            started,
            None,
            "RUNNING",
            strategy,
            mode,
            broker,
            Json(meta or {}),
        )
        cur.execute(
            """
            INSERT INTO strategy_run (
                run_id, user_id, created_at, started_at, stopped_at, status, strategy, mode, broker, meta
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """,
            values,
        )
        return new_run_id

    return run_with_retry(_insert)
def update_run_status(user_id: str, run_id: str, status: str, meta: dict | None = None):
    """Update a run's status, merging *meta* into the stored JSONB meta.

    RUNNING backfills started_at (kept if already set); any other status
    stamps stopped_at with the current time.
    """

    def _apply(cur, _conn):
        stamp = _utc_now()
        if status == "RUNNING":
            query = """
                UPDATE strategy_run
                SET status = %s, started_at = COALESCE(started_at, %s), meta = COALESCE(meta, '{}'::jsonb) || %s
                WHERE run_id = %s AND user_id = %s
                """
        else:
            query = """
                UPDATE strategy_run
                SET status = %s, stopped_at = %s, meta = COALESCE(meta, '{}'::jsonb) || %s
                WHERE run_id = %s AND user_id = %s
                """
        cur.execute(query, (status, stamp, Json(meta or {}), run_id, user_id))
        return True

    return run_with_retry(_apply)

View File

@ -0,0 +1,650 @@
import json
import os
import sys
import threading
from datetime import datetime, timedelta, timezone
from pathlib import Path
ENGINE_ROOT = Path(__file__).resolve().parents[3]
if str(ENGINE_ROOT) not in sys.path:
sys.path.append(str(ENGINE_ROOT))
from indian_paper_trading_strategy.engine.market import is_market_open, align_to_market_open
from indian_paper_trading_strategy.engine.runner import start_engine, stop_engine
from indian_paper_trading_strategy.engine.state import init_paper_state, load_state
from indian_paper_trading_strategy.engine.broker import PaperBroker
from indian_paper_trading_strategy.engine.time_utils import frequency_to_timedelta
from indian_paper_trading_strategy.engine.db import engine_context
from app.services.db import db_connection
from app.services.run_service import (
create_strategy_run,
get_active_run_id,
get_running_run_id,
update_run_status,
)
from app.services.auth_service import get_user_by_id
from app.services.email_service import send_email
from psycopg2.extras import Json
from psycopg2 import errors
# Monotonic strategy_log sequence counter shared across threads; guarded by SEQ_LOCK.
SEQ_LOCK = threading.Lock()
SEQ = 0
# Per-run timestamp of the last SIP_WAITING log, used to rate-limit that event.
LAST_WAIT_LOG_TS = {}
WAIT_LOG_INTERVAL = timedelta(seconds=60)
def init_log_state():
    """Seed the in-process SEQ counter from the highest persisted log seq."""
    global SEQ
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT COALESCE(MAX(seq), 0) FROM strategy_log")
            row = cur.fetchone()
    SEQ = row[0] if row and row[0] is not None else 0
def start_new_run(user_id: str, run_id: str):
    """Reset the SIP wait-log throttle for this run and log STRATEGY_STARTED."""
    LAST_WAIT_LOG_TS.pop(run_id, None)
    details = dict(
        user_id=user_id,
        run_id=run_id,
        event="STRATEGY_STARTED",
        message="Strategy started",
        meta={},
    )
    emit_event(**details)
def stop_run(user_id: str, run_id: str, reason="user_request"):
    """Log a STRATEGY_STOPPED event carrying the stop *reason*."""
    details = dict(
        user_id=user_id,
        run_id=run_id,
        event="STRATEGY_STOPPED",
        message="Strategy stopped",
        meta={"reason": reason},
    )
    emit_event(**details)
def emit_event(
    *,
    user_id: str,
    run_id: str,
    event: str,
    message: str,
    level: str = "INFO",
    category: str = "ENGINE",
    meta: dict | None = None
):
    """Append one structured row to strategy_log with a process-wide sequence.

    Silently no-ops when user_id or run_id is missing.  SIP_WAITING events
    are rate-limited to one per WAIT_LOG_INTERVAL per run.  The INSERT uses
    ON CONFLICT (seq) DO NOTHING, so a seq collision (e.g. another process
    sharing the counter range) drops the row rather than failing.
    """
    global SEQ, LAST_WAIT_LOG_TS
    if not user_id or not run_id:
        return
    now = datetime.now(timezone.utc)
    if event == "SIP_WAITING":
        # Throttle the noisy wait heartbeat: drop repeats inside the interval.
        last_ts = LAST_WAIT_LOG_TS.get(run_id)
        if last_ts and (now - last_ts) < WAIT_LOG_INTERVAL:
            return
        LAST_WAIT_LOG_TS[run_id] = now
    with SEQ_LOCK:
        SEQ += 1
        seq = SEQ
    evt = {
        "seq": seq,
        "ts": now.isoformat().replace("+00:00", "Z"),
        "level": level,
        "category": category,
        "event": event,
        "message": message,
        "run_id": run_id,
        "meta": meta or {}
    }
    # `with conn:` scopes the INSERT in its own committed transaction.
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO strategy_log (
                        seq, ts, level, category, event, message, user_id, run_id, meta
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                    ON CONFLICT (seq) DO NOTHING
                    """,
                    (
                        evt["seq"],
                        now,
                        evt["level"],
                        evt["category"],
                        evt["event"],
                        evt["message"],
                        user_id,
                        evt["run_id"],
                        Json(evt["meta"]),
                    ),
                )
def _maybe_parse_json(value):
if value is None:
return None
if not isinstance(value, str):
return value
text = value.strip()
if not text:
return None
try:
return json.loads(text)
except Exception:
return value
def _local_tz():
return datetime.now().astimezone().tzinfo
def _format_local_ts(value: datetime | None):
    """Render a datetime as a naive local-time ISO string; None passes through."""
    if value is None:
        return None
    localized = value.astimezone(_local_tz())
    return localized.replace(tzinfo=None).isoformat()
def _load_config(user_id: str, run_id: str):
    """Load the strategy_config row for (user, run) as a dict; {} when absent.

    next_run is rendered as a naive local-time ISO string.  sip_frequency is
    only included when at least one of the dedicated value/unit columns is
    set; legacy rows rely on frequency/frequency_days/unit instead.
    """
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT strategy, sip_amount, sip_frequency_value, sip_frequency_unit,
                       mode, broker, active, frequency, frequency_days, unit, next_run
                FROM strategy_config
                WHERE user_id = %s AND run_id = %s
                LIMIT 1
                """,
                (user_id, run_id),
            )
            row = cur.fetchone()
    if not row:
        return {}
    # Column order matches the SELECT above; row[2]/row[3] are the dedicated
    # sip frequency value/unit columns handled separately below.
    cfg = {
        "strategy": row[0],
        "sip_amount": float(row[1]) if row[1] is not None else None,
        "mode": row[4],
        "broker": row[5],
        "active": row[6],
        "frequency": _maybe_parse_json(row[7]),
        "frequency_days": row[8],
        "unit": row[9],
        "next_run": _format_local_ts(row[10]),
    }
    if row[2] is not None or row[3] is not None:
        cfg["sip_frequency"] = {
            "value": row[2],
            "unit": row[3],
        }
    return cfg
def _save_config(cfg, user_id: str, run_id: str):
    """Upsert the strategy_config row for (user, run) from a config dict.

    Normalizes before writing: sip_frequency splits into value/unit columns,
    non-string frequency payloads are JSON-encoded, and next_run ISO strings
    are parsed to a datetime (naive values are assumed local time; unparseable
    values are stored as NULL).
    """
    sip_frequency = cfg.get("sip_frequency")
    sip_value = None
    sip_unit = None
    if isinstance(sip_frequency, dict):
        sip_value = sip_frequency.get("value")
        sip_unit = sip_frequency.get("unit")
    frequency = cfg.get("frequency")
    # Legacy column stores JSON text; serialize dict/list payloads.
    if not isinstance(frequency, str) and frequency is not None:
        frequency = json.dumps(frequency)
    next_run = cfg.get("next_run")
    next_run_dt = None
    if isinstance(next_run, str):
        try:
            parsed = datetime.fromisoformat(next_run)
            if parsed.tzinfo is None:
                # Naive timestamps are interpreted as host-local time.
                parsed = parsed.replace(tzinfo=_local_tz())
            next_run_dt = parsed
        except ValueError:
            next_run_dt = None
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO strategy_config (
                        user_id,
                        run_id,
                        strategy,
                        sip_amount,
                        sip_frequency_value,
                        sip_frequency_unit,
                        mode,
                        broker,
                        active,
                        frequency,
                        frequency_days,
                        unit,
                        next_run
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    ON CONFLICT (user_id, run_id) DO UPDATE
                    SET strategy = EXCLUDED.strategy,
                        sip_amount = EXCLUDED.sip_amount,
                        sip_frequency_value = EXCLUDED.sip_frequency_value,
                        sip_frequency_unit = EXCLUDED.sip_frequency_unit,
                        mode = EXCLUDED.mode,
                        broker = EXCLUDED.broker,
                        active = EXCLUDED.active,
                        frequency = EXCLUDED.frequency,
                        frequency_days = EXCLUDED.frequency_days,
                        unit = EXCLUDED.unit,
                        next_run = EXCLUDED.next_run
                    """,
                    (
                        user_id,
                        run_id,
                        cfg.get("strategy"),
                        cfg.get("sip_amount"),
                        sip_value,
                        sip_unit,
                        cfg.get("mode"),
                        cfg.get("broker"),
                        cfg.get("active"),
                        frequency,
                        cfg.get("frequency_days"),
                        cfg.get("unit"),
                        next_run_dt,
                    ),
                )
def save_strategy_config(cfg, user_id: str, run_id: str):
    """Public wrapper: upsert the strategy config row for (user, run)."""
    _save_config(cfg, user_id, run_id)
def deactivate_strategy_config(user_id: str, run_id: str):
    """Persist the run's config with active=False (load, flip, store)."""
    current = _load_config(user_id, run_id)
    current["active"] = False
    _save_config(current, user_id, run_id)
def _write_status(user_id: str, run_id: str, status):
    """Upsert the engine_status row for (user, run) with a local timestamp."""
    stamp = datetime.now().astimezone()
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO engine_status (user_id, run_id, status, last_updated)
                    VALUES (%s, %s, %s, %s)
                    ON CONFLICT (user_id, run_id) DO UPDATE
                    SET status = EXCLUDED.status,
                        last_updated = EXCLUDED.last_updated
                    """,
                    (user_id, run_id, status, stamp),
                )
def validate_frequency(freq: dict, mode: str):
    """Validate a SIP frequency payload ({"value", "unit"}) for *mode*.

    Raises ValueError when the payload is missing, the unit is unknown,
    minute-level scheduling is requested outside PAPER mode, or the value
    is below the per-unit minimum.
    """
    if not isinstance(freq, dict):
        raise ValueError("Frequency payload is required")
    value = int(freq.get("value", 0))
    unit = freq.get("unit")
    if unit == "minutes":
        # Minute granularity is a paper-trading-only feature.
        if mode != "PAPER":
            raise ValueError("Minute-level frequency allowed only in PAPER mode")
        if value < 1:
            raise ValueError("Minimum frequency is 1 minute")
    elif unit == "days":
        if value < 1:
            raise ValueError("Minimum frequency is 1 day")
    else:
        raise ValueError(f"Unsupported frequency unit: {unit}")
def compute_next_eligible(last_run: str | None, sip_frequency: dict | None):
    """Next eligible SIP timestamp: last_run + frequency, aligned to market open.

    Returns an ISO string, or None when either input is missing or cannot
    be parsed.
    """
    if not last_run or not sip_frequency:
        return None
    try:
        anchor = datetime.fromisoformat(last_run)
        step = frequency_to_timedelta(sip_frequency)
    except ValueError:
        return None
    aligned = align_to_market_open(anchor + step)
    return aligned.isoformat()
def start_strategy(req, user_id: str):
    """Start (or restart) the user's paper-trading strategy.

    Flow: if a RUNNING run already exists, either report it (external engine)
    or try to restart the in-process engine against it.  Otherwise validate
    the request, create a new RUNNING run, initialize paper state and the
    broker account, persist the config, log/record RUNNING, optionally spawn
    the in-process engine, and best-effort email the user.  Returns a dict
    with a "status" key (and "run_id" when applicable).
    """
    # ENGINE_EXTERNAL means a separate engine process owns execution.
    engine_external = os.getenv("ENGINE_EXTERNAL", "").strip().lower() in {"1", "true", "yes"}
    running_run_id = get_running_run_id(user_id)
    if running_run_id:
        if engine_external:
            return {"status": "already_running", "run_id": running_run_id}
        # In-process mode: try to re-attach an engine to the existing run.
        engine_config = _build_engine_config(user_id, running_run_id, req)
        if engine_config:
            started = start_engine(engine_config)
            if started:
                _write_status(user_id, running_run_id, "RUNNING")
                return {"status": "restarted", "run_id": running_run_id}
        return {"status": "already_running", "run_id": running_run_id}
    mode = (req.mode or "PAPER").strip().upper()
    if mode != "PAPER":
        return {"status": "unsupported_mode"}
    frequency_payload = req.sip_frequency.dict() if hasattr(req.sip_frequency, "dict") else dict(req.sip_frequency)
    validate_frequency(frequency_payload, mode)
    initial_cash = float(req.initial_cash) if req.initial_cash is not None else 1_000_000.0
    try:
        run_id = create_strategy_run(
            user_id,
            strategy=req.strategy_name,
            mode=mode,
            broker="paper",
            meta={
                "sip_amount": req.sip_amount,
                "sip_frequency": frequency_payload,
                "initial_cash": initial_cash,
            },
        )
    except errors.UniqueViolation:
        # Concurrent start attempt inserted the run first.
        return {"status": "already_running"}
    with engine_context(user_id, run_id):
        init_paper_state(initial_cash, frequency_payload)
        with db_connection() as conn:
            with conn:
                with conn.cursor() as cur:
                    cur.execute(
                        """
                        INSERT INTO paper_broker_account (user_id, run_id, cash)
                        VALUES (%s, %s, %s)
                        ON CONFLICT (user_id, run_id) DO UPDATE
                        SET cash = EXCLUDED.cash
                        """,
                        (user_id, run_id, initial_cash),
                    )
        # Instantiating the broker inside the context initializes its store.
        PaperBroker(initial_cash=initial_cash)
    config = {
        "strategy": req.strategy_name,
        "sip_amount": req.sip_amount,
        "sip_frequency": frequency_payload,
        "mode": mode,
        "broker": "paper",
        "active": True,
    }
    save_strategy_config(config, user_id, run_id)
    start_new_run(user_id, run_id)
    _write_status(user_id, run_id, "RUNNING")
    if not engine_external:
        # Closure forwards engine events into this run's log stream.
        def emit_event_cb(*, event: str, message: str, level: str = "INFO", category: str = "ENGINE", meta: dict | None = None):
            emit_event(
                user_id=user_id,
                run_id=run_id,
                event=event,
                message=message,
                level=level,
                category=category,
                meta=meta,
            )
        engine_config = dict(config)
        engine_config["initial_cash"] = initial_cash
        engine_config["run_id"] = run_id
        engine_config["user_id"] = user_id
        engine_config["emit_event"] = emit_event_cb
        start_engine(engine_config)
    try:
        user = get_user_by_id(user_id)
        if user:
            body = (
                "Your strategy has been started.\n\n"
                f"Strategy: {req.strategy_name}\n"
                f"Mode: {mode}\n"
                f"Run ID: {run_id}\n"
            )
            send_email(user["username"], "Strategy started", body)
    except Exception:
        # Notification email is best-effort; never fail the start.
        pass
    return {"status": "started", "run_id": run_id}
def _build_engine_config(user_id: str, run_id: str, req=None):
    """Assemble the engine config dict for a run from its stored config.

    Gaps are filled from the optional request object, then from legacy
    columns (frequency_days/unit), then from hard defaults ("Golden Nifty",
    PAPER, paper broker, 1,000,000 initial cash).  The returned dict also
    carries an emit_event callback bound to this user/run.
    """
    cfg = _load_config(user_id, run_id)
    sip_frequency = cfg.get("sip_frequency")
    if not isinstance(sip_frequency, dict) and req is not None:
        sip_frequency = req.sip_frequency.dict() if hasattr(req.sip_frequency, "dict") else dict(req.sip_frequency)
    if not isinstance(sip_frequency, dict):
        # Legacy fallback: day-based columns.
        sip_frequency = {"value": cfg.get("frequency_days") or 1, "unit": cfg.get("unit") or "days"}
    sip_amount = cfg.get("sip_amount")
    if sip_amount is None and req is not None:
        sip_amount = req.sip_amount
    mode = (cfg.get("mode") or (req.mode if req is not None else "PAPER") or "PAPER").strip().upper()
    broker = cfg.get("broker") or "paper"
    strategy_name = cfg.get("strategy") or cfg.get("strategy_name") or (req.strategy_name if req is not None else None)
    with engine_context(user_id, run_id):
        state = load_state(mode=mode)
    initial_cash = float(state.get("initial_cash") or 1_000_000.0)
    # Closure forwards engine events into this run's log stream.
    def emit_event_cb(*, event: str, message: str, level: str = "INFO", category: str = "ENGINE", meta: dict | None = None):
        emit_event(
            user_id=user_id,
            run_id=run_id,
            event=event,
            message=message,
            level=level,
            category=category,
            meta=meta,
        )
    return {
        "strategy": strategy_name or "Golden Nifty",
        "sip_amount": sip_amount or 0,
        "sip_frequency": sip_frequency,
        "mode": mode,
        "broker": broker,
        "active": cfg.get("active", True),
        "initial_cash": initial_cash,
        "user_id": user_id,
        "run_id": run_id,
        "emit_event": emit_event_cb,
    }
def resume_running_runs():
    """On startup, restart an in-process engine for every RUNNING run.

    No-op when ENGINE_EXTERNAL is set (a separate engine process owns
    execution).  Each successfully started engine refreshes that run's
    engine_status row to RUNNING.
    """
    engine_external = os.getenv("ENGINE_EXTERNAL", "").strip().lower() in {"1", "true", "yes"}
    if engine_external:
        return
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT user_id, run_id
                FROM strategy_run
                WHERE status = 'RUNNING'
                ORDER BY created_at DESC
                """
            )
            runs = cur.fetchall()
    for user_id, run_id in runs:
        engine_config = _build_engine_config(user_id, run_id, None)
        if not engine_config:
            continue
        started = start_engine(engine_config)
        if started:
            _write_status(user_id, run_id, "RUNNING")
def stop_strategy(user_id: str):
    """Stop the user's active run.

    Halts the in-process engine (unless ENGINE_EXTERNAL), deactivates the
    stored config, logs/persists STOPPED, and best-effort emails the user.
    Always returns {"status": "stopped"}.
    """
    run_id = get_active_run_id(user_id)
    engine_external = os.getenv("ENGINE_EXTERNAL", "").strip().lower() in {"1", "true", "yes"}
    if not engine_external:
        stop_engine(user_id, run_id, timeout=15.0)
    deactivate_strategy_config(user_id, run_id)
    stop_run(user_id, run_id, reason="user_request")
    _write_status(user_id, run_id, "STOPPED")
    update_run_status(user_id, run_id, "STOPPED", meta={"reason": "user_request"})
    try:
        user = get_user_by_id(user_id)
        if user:
            body = "Your strategy has been stopped."
            send_email(user["username"], "Strategy stopped", body)
    except Exception:
        # Notification email is best-effort; never fail the stop.
        pass
    return {"status": "stopped"}
def get_strategy_status(user_id: str):
    """Engine status for the user's active run, enriched while RUNNING.

    Returns {"status", "last_updated"} (IDLE when no row exists).  For a
    RUNNING run it adds last_execution_ts / next_eligible_ts and downgrades
    the status to WAITING when the next eligible time is still in the future.
    """
    run_id = get_active_run_id(user_id)
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT status, last_updated FROM engine_status WHERE user_id = %s AND run_id = %s",
                (user_id, run_id),
            )
            row = cur.fetchone()
    if not row:
        status = {"status": "IDLE", "last_updated": None}
    else:
        status = {
            "status": row[0],
            "last_updated": _format_local_ts(row[1]),
        }
    if status.get("status") == "RUNNING":
        cfg = _load_config(user_id, run_id)
        mode = (cfg.get("mode") or "LIVE").strip().upper()
        with engine_context(user_id, run_id):
            state = load_state(mode=mode)
        last_execution_ts = state.get("last_run") or state.get("last_sip_ts")
        sip_frequency = cfg.get("sip_frequency")
        if not isinstance(sip_frequency, dict):
            # Reconstruct the frequency from legacy config columns.
            frequency = cfg.get("frequency")
            unit = cfg.get("unit")
            if isinstance(frequency, dict):
                unit = frequency.get("unit", unit)
                frequency = frequency.get("value")
            if frequency is None and cfg.get("frequency_days") is not None:
                frequency = cfg.get("frequency_days")
                unit = unit or "days"
            if frequency is not None and unit:
                sip_frequency = {"value": frequency, "unit": unit}
        next_eligible = compute_next_eligible(last_execution_ts, sip_frequency)
        status["last_execution_ts"] = last_execution_ts
        status["next_eligible_ts"] = next_eligible
        if next_eligible:
            try:
                parsed_next = datetime.fromisoformat(next_eligible)
                # Compare in the same awareness (aware vs naive) as parsed_next.
                now_cmp = datetime.now(parsed_next.tzinfo) if parsed_next.tzinfo else datetime.now()
                if parsed_next > now_cmp:
                    status["status"] = "WAITING"
            except ValueError:
                pass
    return status
def get_engine_status(user_id: str):
    """Detailed engine status dict for the user's active run.

    Includes state (STOPPED when no status row), last_heartbeat_ts (UTC,
    Z-suffixed), last_execution_ts and next_eligible_ts derived from the
    stored config (with legacy-column fallback) and persisted state.
    """
    run_id = get_active_run_id(user_id)
    status = {
        "state": "STOPPED",
        "run_id": run_id,
        "user_id": user_id,
        "last_heartbeat_ts": None,
    }
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT status, last_updated
                FROM engine_status
                WHERE user_id = %s AND run_id = %s
                ORDER BY last_updated DESC
                LIMIT 1
                """,
                (user_id, run_id),
            )
            row = cur.fetchone()
    if row:
        status["state"] = row[0]
        last_updated = row[1]
        if last_updated is not None:
            status["last_heartbeat_ts"] = (
                last_updated.astimezone(timezone.utc)
                .isoformat()
                .replace("+00:00", "Z")
            )
    cfg = _load_config(user_id, run_id)
    mode = (cfg.get("mode") or "LIVE").strip().upper()
    with engine_context(user_id, run_id):
        state = load_state(mode=mode)
    last_execution_ts = state.get("last_run") or state.get("last_sip_ts")
    sip_frequency = cfg.get("sip_frequency")
    if isinstance(sip_frequency, dict):
        sip_frequency = {
            "value": sip_frequency.get("value"),
            "unit": sip_frequency.get("unit"),
        }
    else:
        # Reconstruct the frequency from legacy config columns.
        frequency = cfg.get("frequency")
        unit = cfg.get("unit")
        if isinstance(frequency, dict):
            unit = frequency.get("unit", unit)
            frequency = frequency.get("value")
        if frequency is None and cfg.get("frequency_days") is not None:
            frequency = cfg.get("frequency_days")
            unit = unit or "days"
        if frequency is not None and unit:
            sip_frequency = {"value": frequency, "unit": unit}
    status["last_execution_ts"] = last_execution_ts
    status["next_eligible_ts"] = compute_next_eligible(last_execution_ts, sip_frequency)
    status["run_id"] = run_id
    return status
def get_strategy_logs(user_id: str, since_seq: int):
    """Log events newer than *since_seq* for the user's active run.

    Returns {"events": [...], "latest_seq": N} with timestamps rendered as
    Z-suffixed UTC ISO strings.
    """
    run_id = get_active_run_id(user_id)
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT seq, ts, level, category, event, message, run_id, meta
                FROM strategy_log
                WHERE user_id = %s AND run_id = %s AND seq > %s
                ORDER BY seq
                """,
                (user_id, run_id, since_seq),
            )
            rows = cur.fetchall()
            cur.execute(
                "SELECT COALESCE(MAX(seq), 0) FROM strategy_log WHERE user_id = %s AND run_id = %s",
                (user_id, run_id),
            )
            latest_seq = cur.fetchone()[0]

    def _as_event(record):
        ts = record[1]
        iso_ts = (
            ts.astimezone(timezone.utc).isoformat().replace("+00:00", "Z")
            if ts is not None
            else None
        )
        return {
            "seq": record[0],
            "ts": iso_ts,
            "level": record[2],
            "category": record[3],
            "event": record[4],
            "message": record[5],
            "run_id": record[6],
            "meta": record[7] if isinstance(record[7], dict) else {},
        }

    return {"events": [_as_event(r) for r in rows], "latest_seq": latest_seq}
def get_market_status():
    """Current market OPEN/CLOSED status plus the check timestamp."""
    checked = datetime.now()
    state = "OPEN" if is_market_open(checked) else "CLOSED"
    return {
        "status": state,
        "checked_at": checked.isoformat(),
    }

View File

@ -0,0 +1,70 @@
import os
from datetime import datetime, timezone
from uuid import uuid4
from app.services.db import db_connection
from app.services.email_service import send_email
def _now():
return datetime.now(timezone.utc)
def create_ticket(name: str, email: str, subject: str, message: str) -> dict:
    """Persist a NEW support ticket and best-effort notify the requester.

    Returns a summary dict; email_sent reflects whether the confirmation
    email actually went out (failures never block ticket creation).
    """
    ticket_id = str(uuid4())
    created_at = _now()
    with db_connection() as conn:
        with conn:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO support_ticket
                    (id, name, email, subject, message, status, created_at, updated_at)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    """,
                    (ticket_id, name, email, subject, message, "NEW", created_at, created_at),
                )
    try:
        email_body = (
            "Hi,\n\n"
            "Your support ticket has been created.\n\n"
            f"Ticket ID: {ticket_id}\n"
            f"Subject: {subject}\n"
            "Status: NEW\n\n"
            "We will get back to you shortly.\n\n"
            "Quantfortune Support"
        )
        email_sent = send_email(email, "Quantfortune Support Ticket Created", email_body)
    except Exception:
        # Confirmation email is best-effort only.
        email_sent = False
    return {
        "ticket_id": ticket_id,
        "status": "NEW",
        "created_at": created_at.isoformat(),
        "email_sent": email_sent,
    }
def get_ticket_status(ticket_id: str, email: str) -> dict | None:
    """Ticket summary for (ticket_id, email).

    Returns None when the ticket does not exist or the email does not match
    (case-insensitive) — the email acts as a lightweight access check.
    """
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, email, status, created_at, updated_at
                FROM support_ticket
                WHERE id = %s
                """,
                (ticket_id,),
            )
            row = cur.fetchone()
    if not row:
        return None
    if row[1].lower() != email.lower():
        return None
    created_at, updated_at = row[3], row[4]
    return {
        "ticket_id": row[0],
        "status": row[2],
        "created_at": created_at.isoformat() if created_at else None,
        "updated_at": updated_at.isoformat() if updated_at else None,
    }

View File

@ -0,0 +1,378 @@
import hashlib
import json
import os
from datetime import datetime, timezone
from psycopg2.extras import Json
from app.broker_store import get_user_broker, set_broker_auth_state
from app.services.db import db_connection
from app.services.run_lifecycle import RunLifecycleError, RunLifecycleManager
from app.services.strategy_service import compute_next_eligible, resume_running_runs
from app.services.zerodha_service import KiteTokenError, fetch_funds
from app.services.zerodha_storage import get_session
def _hash_value(value: str | None) -> str | None:
if value is None:
return None
return hashlib.sha256(value.encode("utf-8")).hexdigest()
def _parse_frequency(raw_value):
if raw_value is None:
return None
if isinstance(raw_value, dict):
return raw_value
if isinstance(raw_value, str):
text = raw_value.strip()
if not text:
return None
try:
return json.loads(text)
except Exception:
return None
return None
def _resolve_sip_frequency(row: dict):
value = row.get("sip_frequency_value")
unit = row.get("sip_frequency_unit")
if value is not None and unit:
return {"value": int(value), "unit": unit}
frequency = _parse_frequency(row.get("frequency"))
if isinstance(frequency, dict):
freq_value = frequency.get("value")
freq_unit = frequency.get("unit")
if freq_value is not None and freq_unit:
return {"value": int(freq_value), "unit": freq_unit}
fallback_value = row.get("frequency_days")
fallback_unit = row.get("unit") or "days"
if fallback_value is not None:
return {"value": int(fallback_value), "unit": fallback_unit}
return None
def _parse_ts(value: str | None):
if not value:
return None
try:
return datetime.fromisoformat(value)
except ValueError:
return None
def _validate_broker_session(user_id: str):
    """True when the user's stored broker session exists and authenticates.

    When BROKER_VALIDATION_MODE=skip, the live API check is bypassed.  An
    expired token flips the stored auth state to EXPIRED and returns False.
    """
    session = get_session(user_id)
    if not session:
        return False
    validation_mode = os.getenv("BROKER_VALIDATION_MODE", "").strip().lower()
    if validation_mode == "skip":
        return True
    try:
        # Cheap authenticated call to prove the token still works.
        fetch_funds(session["api_key"], session["access_token"])
    except KiteTokenError:
        set_broker_auth_state(user_id, "EXPIRED")
        return False
    return True
def arm_system(user_id: str, client_ip: str | None = None):
    """Arm every active strategy run for *user_id* and schedule next executions.

    Flow: validate the broker session (bail early with BROKER_AUTH_REQUIRED
    when it fails), then inside a single DB transaction mark each eligible
    run RUNNING, upsert engine status/state rows, persist the next eligible
    run time, and write engine/audit/ledger events.  Returns a summary
    payload with armed runs, failed runs, the soonest next execution, and
    the current broker state.
    """
    if not _validate_broker_session(user_id):
        return {
            "ok": False,
            "code": "BROKER_AUTH_REQUIRED",
            "redirect_url": "/api/broker/login",
        }
    now = datetime.now(timezone.utc)
    armed_runs = []
    failed_runs = []
    next_runs = []  # datetimes collected to report the soonest upcoming execution
    with db_connection() as conn:
        with conn:  # psycopg2 transaction scope: commit on success, rollback on error
            with conn.cursor() as cur:
                # Only runs with an active config row are candidates for arming.
                cur.execute(
                    """
                    SELECT sr.run_id, sr.status, sr.strategy, sr.mode, sr.broker,
                           sc.active, sc.sip_frequency_value, sc.sip_frequency_unit,
                           sc.frequency, sc.frequency_days, sc.unit, sc.next_run
                    FROM strategy_run sr
                    LEFT JOIN strategy_config sc
                        ON sc.user_id = sr.user_id AND sc.run_id = sr.run_id
                    WHERE sr.user_id = %s AND COALESCE(sc.active, false) = true
                    ORDER BY sr.created_at DESC
                    """,
                    (user_id,),
                )
                rows = cur.fetchall()
                cur.execute("SELECT username FROM app_user WHERE id = %s", (user_id,))
                user_row = cur.fetchone()
                username = user_row[0] if user_row else None
                for row in rows:
                    run = {
                        "run_id": row[0],
                        "status": row[1],
                        "strategy": row[2],
                        "mode": row[3],
                        "broker": row[4],
                        "active": row[5],
                        "sip_frequency_value": row[6],
                        "sip_frequency_unit": row[7],
                        "frequency": row[8],
                        "frequency_days": row[9],
                        "unit": row[10],
                        "next_run": row[11],
                    }
                    status = (run["status"] or "").strip().upper()
                    if status == "RUNNING":
                        # Already armed: report it but leave its state untouched.
                        armed_runs.append(
                            {
                                "run_id": run["run_id"],
                                "status": status,
                                "already_running": True,
                            }
                        )
                        if run.get("next_run"):
                            next_runs.append(run["next_run"])
                        continue
                    if status == "ERROR":
                        # Errored runs are never auto-armed; surface them to the caller.
                        failed_runs.append(
                            {
                                "run_id": run["run_id"],
                                "status": status,
                                "reason": "ERROR",
                            }
                        )
                        continue
                    try:
                        RunLifecycleManager.assert_can_arm(status)
                    except RunLifecycleError as exc:
                        failed_runs.append(
                            {
                                "run_id": run["run_id"],
                                "status": status,
                                "reason": str(exc),
                            }
                        )
                        continue
                    # Schedule the next eligible execution from "now" and the run's frequency.
                    sip_frequency = _resolve_sip_frequency(run)
                    last_run = now.isoformat()
                    next_run = compute_next_eligible(last_run, sip_frequency)
                    next_run_dt = _parse_ts(next_run)
                    # started_at is only set on first arm; re-arming preserves it.
                    cur.execute(
                        """
                        UPDATE strategy_run
                        SET status = 'RUNNING',
                            started_at = COALESCE(started_at, %s),
                            stopped_at = NULL,
                            meta = COALESCE(meta, '{}'::jsonb) || %s
                        WHERE user_id = %s AND run_id = %s
                        """,
                        (
                            now,
                            Json({"armed_at": now.isoformat()}),
                            user_id,
                            run["run_id"],
                        ),
                    )
                    cur.execute(
                        """
                        INSERT INTO engine_status (user_id, run_id, status, last_updated)
                        VALUES (%s, %s, %s, %s)
                        ON CONFLICT (user_id, run_id) DO UPDATE
                        SET status = EXCLUDED.status,
                            last_updated = EXCLUDED.last_updated
                        """,
                        (user_id, run["run_id"], "RUNNING", now),
                    )
                    # Paper and live runs keep engine state in separate tables.
                    if (run.get("mode") or "").strip().upper() == "PAPER":
                        cur.execute(
                            """
                            INSERT INTO engine_state_paper (user_id, run_id, last_run)
                            VALUES (%s, %s, %s)
                            ON CONFLICT (user_id, run_id) DO UPDATE
                            SET last_run = EXCLUDED.last_run
                            """,
                            (user_id, run["run_id"], now),
                        )
                    else:
                        cur.execute(
                            """
                            INSERT INTO engine_state (user_id, run_id, last_run)
                            VALUES (%s, %s, %s)
                            ON CONFLICT (user_id, run_id) DO UPDATE
                            SET last_run = EXCLUDED.last_run
                            """,
                            (user_id, run["run_id"], now),
                        )
                    cur.execute(
                        """
                        UPDATE strategy_config
                        SET next_run = %s
                        WHERE user_id = %s AND run_id = %s
                        """,
                        (next_run_dt, user_id, run["run_id"]),
                    )
                    # Second-resolution logical time keys the idempotent ledger rows.
                    logical_time = now.replace(microsecond=0)
                    cur.execute(
                        """
                        INSERT INTO engine_event (user_id, run_id, ts, event, message, meta)
                        VALUES (%s, %s, %s, %s, %s, %s)
                        """,
                        (
                            user_id,
                            run["run_id"],
                            now,
                            "SYSTEM_ARMED",
                            "System armed",
                            Json({"next_run": next_run}),
                        ),
                    )
                    cur.execute(
                        """
                        INSERT INTO engine_event (user_id, run_id, ts, event, message, meta)
                        VALUES (%s, %s, %s, %s, %s, %s)
                        """,
                        (
                            user_id,
                            run["run_id"],
                            now,
                            "RUN_REARMED",
                            "Run re-armed",
                            Json({"next_run": next_run}),
                        ),
                    )
                    cur.execute(
                        """
                        INSERT INTO event_ledger (
                            user_id, run_id, timestamp, logical_time, event
                        )
                        VALUES (%s, %s, %s, %s, %s)
                        ON CONFLICT (user_id, run_id, event, logical_time) DO NOTHING
                        """,
                        (
                            user_id,
                            run["run_id"],
                            now,
                            logical_time,
                            "SYSTEM_ARMED",
                        ),
                    )
                    cur.execute(
                        """
                        INSERT INTO event_ledger (
                            user_id, run_id, timestamp, logical_time, event
                        )
                        VALUES (%s, %s, %s, %s, %s)
                        ON CONFLICT (user_id, run_id, event, logical_time) DO NOTHING
                        """,
                        (
                            user_id,
                            run["run_id"],
                            now,
                            logical_time,
                            "RUN_REARMED",
                        ),
                    )
                    armed_runs.append(
                        {
                            "run_id": run["run_id"],
                            "status": "RUNNING",
                            "next_run": next_run,
                        }
                    )
                    if next_run_dt:
                        next_runs.append(next_run_dt)
                # Hashed identifiers keep raw user data out of the audit log.
                audit_meta = {
                    "run_count": len(armed_runs),
                    "ip": client_ip,
                }
                cur.execute(
                    """
                    INSERT INTO admin_audit_log
                        (actor_user_hash, target_user_hash, target_username_hash, action, meta)
                    VALUES (%s, %s, %s, %s, %s)
                    """,
                    (
                        _hash_value(user_id),
                        _hash_value(user_id),
                        _hash_value(username),
                        "SYSTEM_ARM",
                        Json(audit_meta),
                    ),
                )
    try:
        # Best effort after commit: kick the scheduler so armed runs resume promptly.
        resume_running_runs()
    except Exception:
        pass
    broker_state = get_user_broker(user_id) or {}
    # NOTE(review): next_runs may mix DB-sourced and parsed datetimes — assumes
    # all are timezone-aware so min() comparison is valid; confirm.
    next_execution = min(next_runs).isoformat() if next_runs else None
    return {
        "ok": True,
        "armed_runs": armed_runs,
        "failed_runs": failed_runs,
        "next_execution": next_execution,
        "broker_state": {
            "connected": bool(broker_state.get("connected")),
            "auth_state": broker_state.get("auth_state"),
            "broker": broker_state.get("broker"),
            "user_name": broker_state.get("user_name"),
        },
    }
def system_status(user_id: str):
    """Return every strategy run for *user_id* together with broker state."""
    broker_state = get_user_broker(user_id) or {}
    with db_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT sr.run_id, sr.status, sr.strategy, sr.mode, sr.broker,
                       sc.next_run, sc.active
                FROM strategy_run sr
                LEFT JOIN strategy_config sc
                    ON sc.user_id = sr.user_id AND sc.run_id = sr.run_id
                WHERE sr.user_id = %s
                ORDER BY sr.created_at DESC
                """,
                (user_id,),
            )
            rows = cur.fetchall()
    runs = [
        {
            "run_id": row[0],
            "status": row[1],
            "strategy": row[2],
            "mode": row[3],
            "broker": row[4],
            "next_run": row[5].isoformat() if row[5] else None,
            "active": bool(row[6]) if row[6] is not None else False,
            # "lifecycle" mirrors the raw run status for API consumers.
            "lifecycle": row[1],
        }
        for row in rows
    ]
    return {
        "runs": runs,
        "broker_state": {
            "connected": bool(broker_state.get("connected")),
            "auth_state": broker_state.get("auth_state"),
            "broker": broker_state.get("broker"),
            "user_name": broker_state.get("user_name"),
        },
    }

19
app/services/tenant.py Normal file
View File

@ -0,0 +1,19 @@
from fastapi import HTTPException, Request
from app.services.auth_service import get_user_for_session
from app.services.run_service import get_default_user_id
SESSION_COOKIE_NAME = "session_id"
def get_request_user_id(request: Request) -> str:
    """Resolve the acting user's id for *request*.

    A valid session cookie wins; otherwise the configured default user id
    is used; with neither available, an HTTP 401 is raised.
    """
    if session_id := request.cookies.get(SESSION_COOKIE_NAME):
        if user := get_user_for_session(session_id):
            return user["id"]
    if fallback_id := get_default_user_id():
        return fallback_id
    raise HTTPException(status_code=401, detail="Not authenticated")

View File

@ -0,0 +1,89 @@
import hashlib
import json
import os
import urllib.error
import urllib.parse
import urllib.request
KITE_API_BASE = os.getenv("KITE_API_BASE", "https://api.kite.trade")
KITE_LOGIN_URL = os.getenv("KITE_LOGIN_URL", "https://kite.trade/connect/login")
KITE_VERSION = "3"
class KiteApiError(Exception):
    """Error response from the Kite REST API, carrying status/type/message."""

    def __init__(self, status_code: int, error_type: str, message: str):
        self.status_code = status_code
        self.error_type = error_type
        self.message = message
        super().__init__(f"Kite API error {status_code}: {error_type} - {message}")
class KiteTokenError(KiteApiError):
    """Kite ``TokenException`` error: the access/request token is invalid or expired."""
    pass
def build_login_url(api_key: str, redirect_url: str | None = None) -> str:
    """Build the Kite Connect login URL for *api_key*.

    An explicit *redirect_url* wins over the ZERODHA_REDIRECT_URL environment
    variable; a blank value omits the redirect parameter entirely.
    """
    query_params = {"api_key": api_key, "v": KITE_VERSION}
    target = (redirect_url or os.getenv("ZERODHA_REDIRECT_URL") or "").strip()
    if target:
        query_params["redirect_url"] = target
    return f"{KITE_LOGIN_URL}?{urllib.parse.urlencode(query_params)}"
def _request(method: str, url: str, data: dict | None = None, headers: dict | None = None):
    """Issue an HTTP request and return the decoded JSON response body.

    *data*, when given, is sent form-encoded.  HTTP errors are translated
    into KiteApiError, or KiteTokenError for ``TokenException`` responses.
    """
    body_bytes = urllib.parse.urlencode(data).encode("utf-8") if data is not None else None
    req = urllib.request.Request(url, data=body_bytes, headers=headers or {}, method=method)
    try:
        with urllib.request.urlopen(req, timeout=20) as resp:
            raw = resp.read().decode("utf-8")
    except urllib.error.HTTPError as err:
        error_body = err.read().decode("utf-8") if err.fp else ""
        try:
            error_payload = json.loads(error_body) if error_body else {}
        except json.JSONDecodeError:
            error_payload = {}
        error_type = error_payload.get("error_type") or error_payload.get("status") or "unknown_error"
        message = error_payload.get("message") or error_body or err.reason
        exc_cls = KiteTokenError if error_type == "TokenException" else KiteApiError
        raise exc_cls(err.code, error_type, message) from err
    return json.loads(raw)
def _auth_headers(api_key: str, access_token: str) -> dict:
    """Headers required for authenticated Kite API calls."""
    return {
        "Authorization": f"token {api_key}:{access_token}",
        "X-Kite-Version": KITE_VERSION,
    }
def exchange_request_token(api_key: str, api_secret: str, request_token: str) -> dict:
    """Exchange a Kite request token for a session (access token) payload.

    The checksum is SHA-256 over api_key + request_token + api_secret, as
    required by the Kite Connect ``/session/token`` endpoint.
    """
    raw = f"{api_key}{request_token}{api_secret}".encode("utf-8")
    checksum = hashlib.sha256(raw).hexdigest()
    response = _request(
        "POST",
        f"{KITE_API_BASE}/session/token",
        data={
            "api_key": api_key,
            "request_token": request_token,
            "checksum": checksum,
        },
    )
    return response.get("data", {})
def fetch_holdings(api_key: str, access_token: str) -> list:
    """Return the user's portfolio holdings from the Kite API."""
    headers = _auth_headers(api_key, access_token)
    payload = _request("GET", f"{KITE_API_BASE}/portfolio/holdings", headers=headers)
    return payload.get("data", [])
def fetch_funds(api_key: str, access_token: str) -> dict:
    """Return the user's margin/funds data from the Kite API."""
    headers = _auth_headers(api_key, access_token)
    payload = _request("GET", f"{KITE_API_BASE}/user/margins", headers=headers)
    return payload.get("data", {})

View File

@ -0,0 +1,125 @@
from datetime import datetime, timezone
from app.services.crypto_service import decrypt_value, encrypt_value
from app.services.db import db_transaction
def _row_to_session(row):
access_token = decrypt_value(row[1]) if row[1] else None
request_token = decrypt_value(row[2]) if row[2] else None
return {
"api_key": row[0],
"access_token": access_token,
"request_token": request_token,
"user_name": row[3],
"broker_user_id": row[4],
"linked_at": row[5],
}
def get_session(user_id: str):
    """Fetch the most recent Zerodha session for *user_id*.

    Prefers the dedicated ``zerodha_session`` table; when no row exists,
    falls back to the legacy ``user_broker`` link.  Returns a session dict
    with decrypted tokens, or None when nothing usable is stored.
    """
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT api_key, access_token, request_token, user_name, broker_user_id, linked_at
            FROM zerodha_session
            WHERE user_id = %s
            ORDER BY linked_at DESC NULLS LAST, id DESC
            LIMIT 1
            """,
            (user_id,),
        )
        row = cur.fetchone()
    if row:
        return _row_to_session(row)
    # Fallback: legacy single-row broker link stored in user_broker.
    with db_transaction() as cur:
        cur.execute(
            """
            SELECT broker, connected, access_token, api_key, user_name, broker_user_id, connected_at
            FROM user_broker
            WHERE user_id = %s
            """,
            (user_id,),
        )
        row = cur.fetchone()
    if not row:
        return None
    broker, connected, access_token, api_key, user_name, broker_user_id, connected_at = row
    # A usable legacy link must be connected, have credentials, and be Zerodha.
    if not connected or not access_token or not api_key:
        return None
    if (broker or "").strip().upper() != "ZERODHA":
        return None
    return {
        "api_key": api_key,
        "access_token": decrypt_value(access_token),
        "request_token": None,
        "user_name": user_name,
        "broker_user_id": broker_user_id,
        "linked_at": connected_at,
    }
def set_session(user_id: str, data: dict):
    """Persist a new Zerodha session row; tokens are encrypted at rest.

    Returns the input data augmented with user_id and the stored linked_at,
    keeping the tokens in plaintext for immediate in-process use.
    """
    access_token = data.get("access_token")
    request_token = data.get("request_token")
    linked_at = datetime.now(timezone.utc)
    with db_transaction() as cur:
        cur.execute(
            """
            INSERT INTO zerodha_session (
                user_id, linked_at, api_key, access_token, request_token, user_name, broker_user_id
            )
            VALUES (%s, %s, %s, %s, %s, %s, %s)
            RETURNING linked_at
            """,
            (
                user_id,
                linked_at,
                data.get("api_key"),
                encrypt_value(access_token) if access_token else None,
                encrypt_value(request_token) if request_token else None,
                data.get("user_name"),
                data.get("broker_user_id"),
            ),
        )
        # Read back the DB-stored timestamp so the return value matches the row.
        linked_at = cur.fetchone()[0]
    return {
        **data,
        "user_id": user_id,
        "linked_at": linked_at,
        "access_token": access_token,
        "request_token": request_token,
    }
def store_request_token(user_id: str, request_token: str):
    """Upsert the user's pending Zerodha request token (encrypted at rest)."""
    with db_transaction() as cur:
        cur.execute(
            """
            INSERT INTO zerodha_request_token (user_id, request_token)
            VALUES (%s, %s)
            ON CONFLICT (user_id)
            DO UPDATE SET request_token = EXCLUDED.request_token
            """,
            (user_id, encrypt_value(request_token)),
        )
def consume_request_token(user_id: str):
    """Return and delete the user's stored request token (one-shot read).

    Returns the decrypted token, or None when nothing is stored.
    """
    with db_transaction() as cur:
        cur.execute(
            "SELECT request_token FROM zerodha_request_token WHERE user_id = %s",
            (user_id,),
        )
        row = cur.fetchone()
        if not row:
            return None
        # Delete in the same transaction so the token can only be consumed once.
        cur.execute("DELETE FROM zerodha_request_token WHERE user_id = %s", (user_id,))
        return decrypt_value(row[0])
def clear_session(user_id: str):
    """Remove all stored Zerodha session rows and pending request tokens."""
    with db_transaction() as cur:
        cur.execute("DELETE FROM zerodha_session WHERE user_id = %s", (user_id,))
        cur.execute("DELETE FROM zerodha_request_token WHERE user_id = %s", (user_id,))

91
market.py Normal file
View File

@ -0,0 +1,91 @@
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict
import sys
import time
from fastapi import APIRouter
PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path:
sys.path.append(str(PROJECT_ROOT))
from indian_paper_trading_strategy.engine.data import fetch_live_price, get_price_snapshot
NIFTY = "NIFTYBEES.NS"
GOLD = "GOLDBEES.NS"
router = APIRouter(prefix="/api/market", tags=["market"])
_LTP_CACHE: Dict[str, Any] = {
"ts_epoch": 0.0,
"data": None,
}
CACHE_TTL_SECONDS = 5
STALE_SECONDS = 60
@router.get("/ltp")
def get_ltp(allow_cache: bool = False):
    """Return last-traded prices for the NIFTY and GOLD ETFs with staleness flags.

    Responses are cached in-process for CACHE_TTL_SECONDS.  When *allow_cache*
    is set and live data is stale, cached/history price sources are accepted
    and the staleness flags are cleared.
    """
    now_epoch = time.time()
    cached = _LTP_CACHE["data"]
    # NOTE(review): the short-TTL cache is served regardless of allow_cache — confirm intended.
    if cached is not None and (now_epoch - _LTP_CACHE["ts_epoch"]) < CACHE_TTL_SECONDS:
        return cached
    nifty_ltp = None
    gold_ltp = None
    # Fetch failures degrade to None rather than failing the endpoint.
    try:
        nifty_ltp = fetch_live_price(NIFTY)
    except Exception:
        nifty_ltp = None
    try:
        gold_ltp = fetch_live_price(GOLD)
    except Exception:
        gold_ltp = None
    nifty_meta = get_price_snapshot(NIFTY) or {}
    gold_meta = get_price_snapshot(GOLD) or {}
    now = datetime.now(timezone.utc)
    def _is_stale(meta: Dict[str, Any], ltp: float | None) -> bool:
        # Stale when the price is missing, not sourced "live", or older than STALE_SECONDS.
        if ltp is None:
            return True
        source = meta.get("source")
        ts = meta.get("ts")
        if source != "live":
            return True
        if isinstance(ts, datetime):
            # assumes snapshot timestamps are timezone-aware — TODO confirm against data layer
            return (now - ts).total_seconds() > STALE_SECONDS
        return False
    nifty_source = str(nifty_meta.get("source") or "").lower()
    gold_source = str(gold_meta.get("source") or "").lower()
    stale_map = {
        NIFTY: _is_stale(nifty_meta, nifty_ltp),
        GOLD: _is_stale(gold_meta, gold_ltp),
    }
    stale_any = stale_map[NIFTY] or stale_map[GOLD]
    if allow_cache and stale_any:
        # Cached data is only accepted when BOTH symbols come from a cache-like source.
        cache_sources = {"cache", "cached", "history"}
        if nifty_source in cache_sources and gold_source in cache_sources:
            stale_map = {NIFTY: False, GOLD: False}
            stale_any = False
    payload = {
        "ts": now.isoformat(),
        "ltp": {
            NIFTY: float(nifty_ltp) if nifty_ltp is not None else None,
            GOLD: float(gold_ltp) if gold_ltp is not None else None,
        },
        "source": {
            NIFTY: nifty_meta.get("source"),
            GOLD: gold_meta.get("source"),
        },
        "stale": stale_map,
        "stale_any": stale_any,
    }
    _LTP_CACHE["ts_epoch"] = now_epoch
    _LTP_CACHE["data"] = payload
    return payload

1
migrations/README Normal file
View File

@ -0,0 +1 @@
Generic single-database configuration.

87
migrations/env.py Normal file
View File

@ -0,0 +1,87 @@
import os
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
from app.services.db import Base, get_database_url
import app.db_models # noqa: F401
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# Alembic reads the URL through ini-style interpolation, so literal % signs
# must be doubled to survive the round-trip.
db_url = get_database_url()
if "%" in db_url:
    db_url = db_url.replace("%", "%%")
config.set_main_option("sqlalchemy.url", db_url)
# All app tables (and the alembic version table) live in this schema.
schema_name = os.getenv("DB_SCHEMA") or os.getenv("PGSCHEMA") or "quant_app"
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        # Keep alembic_version inside the app schema rather than public.
        version_table_schema=schema_name,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    configuration = config.get_section(config.config_ini_section, {})
    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        # Migrations are one-shot; no need to keep pooled connections around.
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Keep alembic_version inside the app schema rather than public.
            version_table_schema=schema_name,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic imports this module and the mode decides the path.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
migrations/script.py.mako Normal file
View File

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,674 @@
"""initial_schema
Revision ID: 52abc790351d
Revises:
Create Date: 2026-01-18 08:34:50.268181
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '52abc790351d'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('admin_audit_log',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('ts', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('actor_user_hash', sa.Text(), nullable=False),
sa.Column('target_user_hash', sa.Text(), nullable=False),
sa.Column('target_username_hash', sa.Text(), nullable=True),
sa.Column('action', sa.Text(), nullable=False),
sa.Column('meta', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('admin_role_audit',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('actor_user_id', sa.String(), nullable=False),
sa.Column('target_user_id', sa.String(), nullable=False),
sa.Column('old_role', sa.String(), nullable=False),
sa.Column('new_role', sa.String(), nullable=False),
sa.Column('changed_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('app_user',
sa.Column('id', sa.String(), nullable=False),
sa.Column('username', sa.String(), nullable=False),
sa.Column('password_hash', sa.String(), nullable=False),
sa.Column('is_admin', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('is_super_admin', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('role', sa.String(), server_default=sa.text("'USER'"), nullable=False),
sa.CheckConstraint("role IN ('USER','ADMIN','SUPER_ADMIN')", name='chk_app_user_role'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_index('idx_app_user_is_admin', 'app_user', ['is_admin'], unique=False)
op.create_index('idx_app_user_is_super_admin', 'app_user', ['is_super_admin'], unique=False)
op.create_index('idx_app_user_role', 'app_user', ['role'], unique=False)
op.create_table('market_close',
sa.Column('symbol', sa.String(), nullable=False),
sa.Column('date', sa.Date(), nullable=False),
sa.Column('close', sa.Numeric(), nullable=False),
sa.PrimaryKeyConstraint('symbol', 'date')
)
op.create_index('idx_market_close_date', 'market_close', ['date'], unique=False)
op.create_index('idx_market_close_symbol', 'market_close', ['symbol'], unique=False)
op.create_table('app_session',
sa.Column('id', sa.String(), nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('last_seen_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_app_session_expires_at', 'app_session', ['expires_at'], unique=False)
op.create_index('idx_app_session_user_id', 'app_session', ['user_id'], unique=False)
op.create_table('strategy_run',
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('stopped_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('status', sa.String(), nullable=False),
sa.Column('strategy', sa.String(), nullable=True),
sa.Column('mode', sa.String(), nullable=True),
sa.Column('broker', sa.String(), nullable=True),
sa.Column('meta', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.CheckConstraint("status IN ('RUNNING','STOPPED','ERROR')", name='chk_strategy_run_status'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('run_id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_strategy_run_user_run')
)
op.create_index('idx_strategy_run_user_created', 'strategy_run', ['user_id', 'created_at'], unique=False)
op.create_index('idx_strategy_run_user_status', 'strategy_run', ['user_id', 'status'], unique=False)
op.create_index('uq_one_running_run_per_user', 'strategy_run', ['user_id'], unique=True, postgresql_where=sa.text("status = 'RUNNING'"))
op.create_table('user_broker',
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('broker', sa.String(), nullable=True),
sa.Column('connected', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('access_token', sa.Text(), nullable=True),
sa.Column('connected_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('api_key', sa.Text(), nullable=True),
sa.Column('user_name', sa.Text(), nullable=True),
sa.Column('broker_user_id', sa.Text(), nullable=True),
sa.Column('pending_broker', sa.Text(), nullable=True),
sa.Column('pending_api_key', sa.Text(), nullable=True),
sa.Column('pending_api_secret', sa.Text(), nullable=True),
sa.Column('pending_started_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id')
)
op.create_index('idx_user_broker_broker', 'user_broker', ['broker'], unique=False)
op.create_index('idx_user_broker_connected', 'user_broker', ['connected'], unique=False)
op.create_table('zerodha_request_token',
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('request_token', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id')
)
op.create_table('zerodha_session',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('linked_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('api_key', sa.Text(), nullable=True),
sa.Column('access_token', sa.Text(), nullable=True),
sa.Column('request_token', sa.Text(), nullable=True),
sa.Column('user_name', sa.Text(), nullable=True),
sa.Column('broker_user_id', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_zerodha_session_linked_at', 'zerodha_session', ['linked_at'], unique=False)
op.create_index('idx_zerodha_session_user_id', 'zerodha_session', ['user_id'], unique=False)
op.create_table('engine_event',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('ts', sa.DateTime(timezone=True), nullable=False),
sa.Column('event', sa.String(), nullable=True),
sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('message', sa.Text(), nullable=True),
sa.Column('meta', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['run_id'], ['strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index('idx_engine_event_ts', 'engine_event', ['ts'], unique=False)
op.create_index('idx_engine_event_user_run_ts', 'engine_event', ['user_id', 'run_id', 'ts'], unique=False)
op.create_table('engine_state',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('total_invested', sa.Numeric(), nullable=True),
sa.Column('nifty_units', sa.Numeric(), nullable=True),
sa.Column('gold_units', sa.Numeric(), nullable=True),
sa.Column('last_sip_ts', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_run', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_engine_state_user_run')
)
op.create_table('engine_state_paper',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('initial_cash', sa.Numeric(), nullable=True),
sa.Column('cash', sa.Numeric(), nullable=True),
sa.Column('total_invested', sa.Numeric(), nullable=True),
sa.Column('nifty_units', sa.Numeric(), nullable=True),
sa.Column('gold_units', sa.Numeric(), nullable=True),
sa.Column('last_sip_ts', sa.DateTime(timezone=True), nullable=True),
sa.Column('last_run', sa.DateTime(timezone=True), nullable=True),
sa.Column('sip_frequency_value', sa.Integer(), nullable=True),
sa.Column('sip_frequency_unit', sa.String(), nullable=True),
sa.CheckConstraint('cash >= 0', name='chk_engine_state_paper_cash_non_negative'),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_engine_state_paper_user_run')
)
op.create_table('engine_status',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('status', sa.String(), nullable=False),
sa.Column('last_updated', sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_engine_status_user_run')
)
op.create_index('idx_engine_status_user_run', 'engine_status', ['user_id', 'run_id'], unique=False)
op.create_table('event_ledger',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('logical_time', sa.DateTime(timezone=True), nullable=False),
sa.Column('event', sa.String(), nullable=False),
sa.Column('nifty_units', sa.Numeric(), nullable=True),
sa.Column('gold_units', sa.Numeric(), nullable=True),
sa.Column('nifty_price', sa.Numeric(), nullable=True),
sa.Column('gold_price', sa.Numeric(), nullable=True),
sa.Column('amount', sa.Numeric(), nullable=True),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', 'event', 'logical_time', name='uq_event_ledger_event_time')
)
op.create_index('idx_event_ledger_ts', 'event_ledger', ['timestamp'], unique=False)
op.create_index('idx_event_ledger_user_run_ts', 'event_ledger', ['user_id', 'run_id', 'timestamp'], unique=False)
op.create_table('mtm_ledger',
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('logical_time', sa.DateTime(timezone=True), nullable=False),
sa.Column('nifty_units', sa.Numeric(), nullable=True),
sa.Column('gold_units', sa.Numeric(), nullable=True),
sa.Column('nifty_price', sa.Numeric(), nullable=True),
sa.Column('gold_price', sa.Numeric(), nullable=True),
sa.Column('nifty_value', sa.Numeric(), nullable=True),
sa.Column('gold_value', sa.Numeric(), nullable=True),
sa.Column('portfolio_value', sa.Numeric(), nullable=True),
sa.Column('total_invested', sa.Numeric(), nullable=True),
sa.Column('pnl', sa.Numeric(), nullable=True),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id', 'run_id', 'logical_time')
)
op.create_index('idx_mtm_ledger_ts', 'mtm_ledger', ['timestamp'], unique=False)
op.create_index('idx_mtm_ledger_user_run_ts', 'mtm_ledger', ['user_id', 'run_id', 'timestamp'], unique=False)
op.create_table('paper_broker_account',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('cash', sa.Numeric(), nullable=False),
sa.CheckConstraint('cash >= 0', name='chk_paper_broker_cash_non_negative'),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_paper_broker_account_user_run')
)
op.create_table('paper_equity_curve',
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('logical_time', sa.DateTime(timezone=True), nullable=False),
sa.Column('equity', sa.Numeric(), nullable=False),
sa.Column('pnl', sa.Numeric(), nullable=True),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id', 'run_id', 'logical_time')
)
op.create_index('idx_paper_equity_curve_ts', 'paper_equity_curve', ['timestamp'], unique=False)
op.create_index('idx_paper_equity_curve_user_run_ts', 'paper_equity_curve', ['user_id', 'run_id', 'timestamp'], unique=False)
op.create_table('paper_order',
sa.Column('id', sa.String(), nullable=False),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('symbol', sa.String(), nullable=False),
sa.Column('side', sa.String(), nullable=False),
sa.Column('qty', sa.Numeric(), nullable=False),
sa.Column('price', sa.Numeric(), nullable=True),
sa.Column('status', sa.String(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('logical_time', sa.DateTime(timezone=True), nullable=False),
sa.CheckConstraint('price >= 0', name='chk_paper_order_price_non_negative'),
sa.CheckConstraint('qty > 0', name='chk_paper_order_qty_positive'),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', 'id', name='uq_paper_order_scope_id'),
sa.UniqueConstraint('user_id', 'run_id', 'logical_time', 'symbol', 'side', name='uq_paper_order_logical_key')
)
op.create_index('idx_paper_order_ts', 'paper_order', ['timestamp'], unique=False)
op.create_index('idx_paper_order_user_run_ts', 'paper_order', ['user_id', 'run_id', 'timestamp'], unique=False)
op.create_table('paper_position',
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('symbol', sa.String(), nullable=False),
sa.Column('qty', sa.Numeric(), nullable=False),
sa.Column('avg_price', sa.Numeric(), nullable=True),
sa.Column('last_price', sa.Numeric(), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
sa.CheckConstraint('qty > 0', name='chk_paper_position_qty_positive'),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('user_id', 'run_id', 'symbol'),
sa.UniqueConstraint('user_id', 'run_id', 'symbol', name='uq_paper_position_scope')
)
op.create_index('idx_paper_position_user_run', 'paper_position', ['user_id', 'run_id'], unique=False)
op.create_table('strategy_config',
sa.Column('id', sa.BigInteger(), autoincrement=True, nullable=False),
sa.Column('strategy', sa.String(), nullable=True),
sa.Column('sip_amount', sa.Numeric(), nullable=True),
sa.Column('sip_frequency_value', sa.Integer(), nullable=True),
sa.Column('sip_frequency_unit', sa.String(), nullable=True),
sa.Column('mode', sa.String(), nullable=True),
sa.Column('broker', sa.String(), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('frequency', sa.Text(), nullable=True),
sa.Column('frequency_days', sa.Integer(), nullable=True),
sa.Column('unit', sa.String(), nullable=True),
sa.Column('next_run', sa.DateTime(timezone=True), nullable=True),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['run_id'], ['strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', name='uq_strategy_config_user_run')
)
op.create_table('strategy_log',
sa.Column('seq', sa.BigInteger(), nullable=False),
sa.Column('ts', sa.DateTime(timezone=True), nullable=False),
sa.Column('level', sa.String(), nullable=True),
sa.Column('category', sa.String(), nullable=True),
sa.Column('event', sa.String(), nullable=True),
sa.Column('message', sa.Text(), nullable=True),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('meta', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['run_id'], ['strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('seq')
)
op.create_index('idx_strategy_log_event', 'strategy_log', ['event'], unique=False)
op.create_index('idx_strategy_log_ts', 'strategy_log', ['ts'], unique=False)
op.create_index('idx_strategy_log_user_run_ts', 'strategy_log', ['user_id', 'run_id', 'ts'], unique=False)
op.create_table('paper_trade',
sa.Column('id', sa.String(), nullable=False),
sa.Column('order_id', sa.String(), nullable=True),
sa.Column('user_id', sa.String(), nullable=False),
sa.Column('run_id', sa.String(), nullable=False),
sa.Column('symbol', sa.String(), nullable=False),
sa.Column('side', sa.String(), nullable=False),
sa.Column('qty', sa.Numeric(), nullable=False),
sa.Column('price', sa.Numeric(), nullable=False),
sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False),
sa.Column('logical_time', sa.DateTime(timezone=True), nullable=False),
sa.CheckConstraint('price >= 0', name='chk_paper_trade_price_non_negative'),
sa.CheckConstraint('qty > 0', name='chk_paper_trade_qty_positive'),
sa.ForeignKeyConstraint(['user_id', 'run_id', 'order_id'], ['paper_order.user_id', 'paper_order.run_id', 'paper_order.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id', 'run_id'], ['strategy_run.user_id', 'strategy_run.run_id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['app_user.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'run_id', 'id', name='uq_paper_trade_scope_id'),
sa.UniqueConstraint('user_id', 'run_id', 'logical_time', 'symbol', 'side', name='uq_paper_trade_logical_key')
)
op.create_index('idx_paper_trade_ts', 'paper_trade', ['timestamp'], unique=False)
op.create_index('idx_paper_trade_user_run_ts', 'paper_trade', ['user_id', 'run_id', 'timestamp'], unique=False)
# admin views and protections
op.execute(
"""
CREATE OR REPLACE FUNCTION prevent_super_admin_delete()
RETURNS trigger AS $$
BEGIN
IF OLD.role = 'SUPER_ADMIN' OR OLD.is_super_admin THEN
RAISE EXCEPTION 'cannot delete super admin user';
END IF;
RETURN OLD;
END;
$$ LANGUAGE plpgsql;
"""
)
op.execute(
"""
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'trg_prevent_super_admin_delete') THEN
CREATE TRIGGER trg_prevent_super_admin_delete
BEFORE DELETE ON app_user
FOR EACH ROW
EXECUTE FUNCTION prevent_super_admin_delete();
END IF;
END $$;
"""
)
op.create_index('idx_event_ledger_user_run_logical', 'event_ledger', ['user_id', 'run_id', 'logical_time'], unique=False)
op.execute(
"""
CREATE OR REPLACE VIEW admin_user_metrics AS
WITH session_stats AS (
SELECT
user_id,
MIN(created_at) AS first_session_at,
MAX(COALESCE(last_seen_at, created_at)) AS last_login_at
FROM app_session
GROUP BY user_id
),
run_stats AS (
SELECT
user_id,
COUNT(*) AS runs_count,
MAX(CASE WHEN status = 'RUNNING' THEN run_id END) AS active_run_id,
MAX(CASE WHEN status = 'RUNNING' THEN status END) AS active_run_status,
MIN(created_at) AS first_run_at
FROM strategy_run
GROUP BY user_id
),
broker_stats AS (
SELECT user_id, BOOL_OR(connected) AS broker_connected
FROM user_broker
GROUP BY user_id
)
SELECT
u.id AS user_id,
u.username,
u.role,
(u.role IN ('ADMIN','SUPER_ADMIN')) AS is_admin,
COALESCE(session_stats.first_session_at, run_stats.first_run_at) AS created_at,
session_stats.last_login_at,
COALESCE(run_stats.runs_count, 0) AS runs_count,
run_stats.active_run_id,
run_stats.active_run_status,
COALESCE(broker_stats.broker_connected, FALSE) AS broker_connected
FROM app_user u
LEFT JOIN session_stats ON session_stats.user_id = u.id
LEFT JOIN run_stats ON run_stats.user_id = u.id
LEFT JOIN broker_stats ON broker_stats.user_id = u.id;
"""
)
op.execute(
"""
CREATE OR REPLACE VIEW admin_run_metrics AS
WITH order_stats AS (
SELECT user_id, run_id, COUNT(*) AS order_count, MAX("timestamp") AS last_order_time
FROM paper_order
GROUP BY user_id, run_id
),
trade_stats AS (
SELECT user_id, run_id, COUNT(*) AS trade_count, MAX("timestamp") AS last_trade_time
FROM paper_trade
GROUP BY user_id, run_id
),
event_stats AS (
SELECT
user_id,
run_id,
MAX("timestamp") AS last_event_time,
MAX(CASE WHEN event = 'SIP_EXECUTED' THEN "timestamp" END) AS last_sip_time
FROM event_ledger
GROUP BY user_id, run_id
),
equity_latest AS (
SELECT DISTINCT ON (user_id, run_id)
user_id,
run_id,
equity AS equity_latest,
pnl AS pnl_latest,
"timestamp" AS equity_ts
FROM paper_equity_curve
ORDER BY user_id, run_id, "timestamp" DESC
),
mtm_latest AS (
SELECT DISTINCT ON (user_id, run_id)
user_id,
run_id,
"timestamp" AS mtm_ts
FROM mtm_ledger
ORDER BY user_id, run_id, "timestamp" DESC
),
log_latest AS (
SELECT user_id, run_id, MAX(ts) AS last_log_time
FROM strategy_log
GROUP BY user_id, run_id
),
engine_latest AS (
SELECT user_id, run_id, MAX(ts) AS last_engine_time
FROM engine_event
GROUP BY user_id, run_id
),
activity AS (
SELECT user_id, run_id, MAX(ts) AS last_event_time
FROM (
SELECT user_id, run_id, ts FROM engine_event
UNION ALL
SELECT user_id, run_id, ts FROM strategy_log
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM paper_order
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM paper_trade
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM mtm_ledger
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM paper_equity_curve
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM event_ledger
) t
GROUP BY user_id, run_id
)
SELECT
sr.run_id,
sr.user_id,
sr.status,
sr.created_at,
sr.started_at,
sr.stopped_at,
sr.strategy,
sr.mode,
sr.broker,
sc.sip_amount,
sc.sip_frequency_value,
sc.sip_frequency_unit,
sc.next_run AS next_sip_time,
activity.last_event_time,
event_stats.last_sip_time,
COALESCE(order_stats.order_count, 0) AS order_count,
COALESCE(trade_stats.trade_count, 0) AS trade_count,
equity_latest.equity_latest,
equity_latest.pnl_latest
FROM strategy_run sr
LEFT JOIN strategy_config sc
ON sc.user_id = sr.user_id AND sc.run_id = sr.run_id
LEFT JOIN order_stats
ON order_stats.user_id = sr.user_id AND order_stats.run_id = sr.run_id
LEFT JOIN trade_stats
ON trade_stats.user_id = sr.user_id AND trade_stats.run_id = sr.run_id
LEFT JOIN event_stats
ON event_stats.user_id = sr.user_id AND event_stats.run_id = sr.run_id
LEFT JOIN equity_latest
ON equity_latest.user_id = sr.user_id AND equity_latest.run_id = sr.run_id
LEFT JOIN mtm_latest
ON mtm_latest.user_id = sr.user_id AND mtm_latest.run_id = sr.run_id
LEFT JOIN log_latest
ON log_latest.user_id = sr.user_id AND log_latest.run_id = sr.run_id
LEFT JOIN engine_latest
ON engine_latest.user_id = sr.user_id AND engine_latest.run_id = sr.run_id
LEFT JOIN activity
ON activity.user_id = sr.user_id AND activity.run_id = sr.run_id;
"""
)
op.execute(
"""
CREATE OR REPLACE VIEW admin_engine_health AS
WITH activity AS (
SELECT user_id, run_id, MAX(ts) AS last_event_time
FROM (
SELECT user_id, run_id, ts FROM engine_event
UNION ALL
SELECT user_id, run_id, ts FROM strategy_log
UNION ALL
SELECT user_id, run_id, "timestamp" AS ts FROM event_ledger
) t
GROUP BY user_id, run_id
)
SELECT
sr.run_id,
sr.user_id,
sr.status,
activity.last_event_time,
es.status AS engine_status,
es.last_updated AS engine_status_ts
FROM strategy_run sr
LEFT JOIN activity
ON activity.user_id = sr.user_id AND activity.run_id = sr.run_id
LEFT JOIN engine_status es
ON es.user_id = sr.user_id AND es.run_id = sr.run_id;
"""
)
op.execute(
"""
CREATE OR REPLACE VIEW admin_order_stats AS
SELECT
user_id,
run_id,
COUNT(*) AS total_orders,
COUNT(*) FILTER (WHERE "timestamp" >= now() - interval '24 hours') AS orders_last_24h,
COUNT(*) FILTER (WHERE status = 'FILLED') AS filled_orders
FROM paper_order
GROUP BY user_id, run_id;
"""
)
op.execute(
"""
CREATE OR REPLACE VIEW admin_ledger_stats AS
WITH mtm_latest AS (
SELECT DISTINCT ON (user_id, run_id)
user_id,
run_id,
portfolio_value,
pnl,
"timestamp" AS mtm_ts
FROM mtm_ledger
ORDER BY user_id, run_id, "timestamp" DESC
),
equity_latest AS (
SELECT DISTINCT ON (user_id, run_id)
user_id,
run_id,
equity,
pnl,
"timestamp" AS equity_ts
FROM paper_equity_curve
ORDER BY user_id, run_id, "timestamp" DESC
)
SELECT
sr.user_id,
sr.run_id,
mtm_latest.portfolio_value AS mtm_value,
mtm_latest.pnl AS mtm_pnl,
mtm_latest.mtm_ts,
equity_latest.equity AS equity_value,
equity_latest.pnl AS equity_pnl,
equity_latest.equity_ts
FROM strategy_run sr
LEFT JOIN mtm_latest
ON mtm_latest.user_id = sr.user_id AND mtm_latest.run_id = sr.run_id
LEFT JOIN equity_latest
ON equity_latest.user_id = sr.user_id AND equity_latest.run_id = sr.run_id;
"""
)
# ### end Alembic commands ###
def downgrade() -> None:
    """Revert everything created by upgrade().

    Order matters: views are dropped before the tables they read, the
    super-admin trigger before its plpgsql function, and child tables
    (paper_trade, strategy_log, ...) before the parent tables they
    reference through foreign keys (strategy_run, app_user).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # 1) Admin reporting views (they depend on the base tables below).
    for view_name in (
        "admin_ledger_stats",
        "admin_order_stats",
        "admin_engine_health",
        "admin_run_metrics",
        "admin_user_metrics",
    ):
        op.execute(f"DROP VIEW IF EXISTS {view_name};")
    # 2) Super-admin delete guard: trigger first, then its function.
    op.execute("DROP TRIGGER IF EXISTS trg_prevent_super_admin_delete ON app_user;")
    op.execute("DROP FUNCTION IF EXISTS prevent_super_admin_delete;")
    # 3) Run-scoped child tables, each preceded by its secondary indexes.
    #    Tuples preserve the original drop order exactly.
    scoped_tables = (
        ("paper_trade", ("idx_paper_trade_user_run_ts", "idx_paper_trade_ts")),
        ("strategy_log", ("idx_strategy_log_user_run_ts", "idx_strategy_log_ts", "idx_strategy_log_event")),
        ("strategy_config", ()),
        ("paper_position", ("idx_paper_position_user_run",)),
        ("paper_order", ("idx_paper_order_user_run_ts", "idx_paper_order_ts")),
        ("paper_equity_curve", ("idx_paper_equity_curve_user_run_ts", "idx_paper_equity_curve_ts")),
        ("paper_broker_account", ()),
        ("mtm_ledger", ("idx_mtm_ledger_user_run_ts", "idx_mtm_ledger_ts")),
        ("event_ledger", ("idx_event_ledger_user_run_logical", "idx_event_ledger_user_run_ts", "idx_event_ledger_ts")),
        ("engine_status", ("idx_engine_status_user_run",)),
        ("engine_state_paper", ()),
        ("engine_state", ()),
        ("engine_event", ("idx_engine_event_user_run_ts", "idx_engine_event_ts")),
        ("zerodha_session", ("idx_zerodha_session_user_id", "idx_zerodha_session_linked_at")),
        ("zerodha_request_token", ()),
        ("user_broker", ("idx_user_broker_connected", "idx_user_broker_broker")),
    )
    for table_name, index_names in scoped_tables:
        for index_name in index_names:
            op.drop_index(index_name, table_name=table_name)
        op.drop_table(table_name)
    # 4) strategy_run: its partial unique index needs the original predicate.
    op.drop_index(
        "uq_one_running_run_per_user",
        table_name="strategy_run",
        postgresql_where=sa.text("status = 'RUNNING'"),
    )
    op.drop_index("idx_strategy_run_user_status", table_name="strategy_run")
    op.drop_index("idx_strategy_run_user_created", table_name="strategy_run")
    op.drop_table("strategy_run")
    # 5) Remaining parent tables.
    parent_tables = (
        ("app_session", ("idx_app_session_user_id", "idx_app_session_expires_at")),
        ("market_close", ("idx_market_close_symbol", "idx_market_close_date")),
        ("app_user", ("idx_app_user_role", "idx_app_user_is_super_admin", "idx_app_user_is_admin")),
        ("admin_role_audit", ()),
        ("admin_audit_log", ()),
    )
    for table_name, index_names in parent_tables:
        for index_name in index_names:
            op.drop_index(index_name, table_name=table_name)
        op.drop_table(table_name)
    # ### end Alembic commands ###

76
paper_mtm.py Normal file
View File

@ -0,0 +1,76 @@
from typing import Any, Dict
from pathlib import Path
import sys

from fastapi import APIRouter, Request

from app.services.paper_broker_service import get_paper_broker
from app.services.tenant import get_request_user_id
from app.services.run_service import get_active_run_id

# HACK: make the repository root importable at module load time so the
# strategy-engine package can be imported without being installed.
PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path:
    sys.path.append(str(PROJECT_ROOT))

# These imports rely on the sys.path mutation above and must stay below it.
from indian_paper_trading_strategy.engine.db import engine_context
from market import get_ltp
from indian_paper_trading_strategy.engine.state import load_state

# Router mounted under /api/paper; exposes the mark-to-market endpoint below.
router = APIRouter(prefix="/api/paper", tags=["paper-mtm"])
@router.get("/mtm")
def paper_mtm(request: Request) -> Dict[str, Any]:
    """Return a mark-to-market snapshot of the caller's active paper run.

    Combines the engine's persisted cash state with cached live prices
    (LTPs) to compute per-position PnL, total positions value, equity,
    and PnL versus the run's initial cash.
    """
    user_id = get_request_user_id(request)
    run_id = get_active_run_id(user_id)
    # NOTE(review): indentation was lost in this dump; the extent of the
    # engine_context block below is reconstructed — confirm against the
    # original file.
    with engine_context(user_id, run_id):
        broker = get_paper_broker(user_id)
        positions = broker.get_positions()
        state = load_state(mode="PAPER")
        cash = float(state.get("cash", 0))
        initial_cash = float(state.get("initial_cash", 0))
        # allow_cache=True: serve cached LTPs; staleness is reported in the
        # response via "price_stale".
        ltp_payload = get_ltp(allow_cache=True)
        ltp_map = ltp_payload["ltp"]
        mtm_positions = []
        positions_value = 0.0
        for pos in positions:
            symbol = pos.get("symbol")
            if not symbol:
                continue
            qty = float(pos.get("qty", 0))
            avg_price = float(pos.get("avg_price", 0))
            ltp = ltp_map.get(symbol)
            # Positions without a live price are silently excluded from both
            # the positions list and the aggregate positions_value.
            if ltp is None:
                continue
            pnl = (ltp - avg_price) * qty
            positions_value += qty * ltp
            mtm_positions.append(
                {
                    "symbol": symbol,
                    "qty": qty,
                    "avg_price": avg_price,
                    "ltp": ltp,
                    "pnl": pnl,
                }
            )
        equity = cash + positions_value
        # NOTE(review): equity minus initial cash also includes realized PnL,
        # not only unrealized — the key name is kept for API compatibility
        # but is arguably a misnomer; confirm with frontend consumers.
        unrealized_pnl = equity - float(initial_cash)
        return {
            "ts": ltp_payload["ts"],
            "initial_cash": initial_cash,
            "cash": cash,
            "positions_value": positions_value,
            "equity": equity,
            "unrealized_pnl": unrealized_pnl,
            "positions": mtm_positions,
            "price_stale": ltp_payload.get("stale_any", False),
            "price_source": ltp_payload.get("source", {}),
        }

43
requirements.txt Normal file
View File

@ -0,0 +1,43 @@
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.1
beautifulsoup4==4.14.3
certifi==2026.1.4
cffi==2.0.0
charset-normalizer==3.4.4
click==8.3.1
colorama==0.4.6
cryptography==46.0.3
curl_cffi==0.13.0
fastapi==0.128.0
frozendict==2.4.7
h11==0.16.0
idna==3.11
httpx==0.27.2
multitasking==0.0.12
numpy==2.4.1
pandas==2.3.3
peewee==3.19.0
platformdirs==4.5.1
protobuf==6.33.4
psycopg2-binary==2.9.11
SQLAlchemy==2.0.36
pycparser==2.23
pydantic==2.12.5
pydantic_core==2.41.5
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.32.5
six==1.17.0
soupsieve==2.8.1
starlette==0.50.0
ta==0.11.0
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.3
urllib3==2.6.3
uvicorn==0.40.0
websockets==16.0
yfinance==1.0
alembic==1.13.3
pytest==8.3.5

29
run_backend.ps1 Normal file
View File

@ -0,0 +1,29 @@
# Launch the backend API (uvicorn) for local development, exporting the
# environment the app reads at startup.
Set-Location "C:\Users\quantfortune\SIP\SIP_India\backend"

# Postgres connection settings.
$env:DB_HOST = 'localhost'
$env:DB_PORT = '5432'
$env:DB_NAME = 'trading_db'
$env:DB_USER = 'trader'
$env:DB_PASSWORD = 'traderpass'
$env:DB_SCHEMA = 'quant_app'
$env:DB_CONNECT_TIMEOUT = '5'

# If an ngrok tunnel URL has been written for the frontend, add it to the
# CORS allow-list and harden cookies for cross-site use; otherwise fall back
# to the localhost redirect below.
$frontendUrlFile = 'C:\Users\quantfortune\SIP\SIP_India\ngrok_frontend_url.txt'
$env:ZERODHA_REDIRECT_URL = 'http://localhost:3000/login'
if (Test-Path $frontendUrlFile) {
$frontendUrl = (Get-Content $frontendUrlFile -Raw).Trim()
if ($frontendUrl) {
$env:CORS_ORIGINS = "http://localhost:3000,http://127.0.0.1:3000,$frontendUrl"
$env:COOKIE_SECURE = '1'
$env:COOKIE_SAMESITE = 'none'
$env:ZERODHA_REDIRECT_URL = "$frontendUrl/login"
}
}

# SECURITY(review): live-looking secrets are committed below — a Fernet
# token key, a super-admin password, and what appears to be a Gmail app
# password. Rotate all of them and load from a secret store or an
# untracked .env file instead of source control.
$env:BROKER_TOKEN_KEY = '6SuYLz0n7-KM5nB_Bs6ueYgDXZZvbmf-K-WpFbOMbH4='
$env:SUPER_ADMIN_EMAIL = 'admin@example.com'
$env:SUPER_ADMIN_PASSWORD = 'AdminPass123!'

# Outbound mail (password-reset OTP delivery).
$env:SMTP_HOST = 'smtp.gmail.com'
$env:SMTP_PORT = '587'
$env:SMTP_USER = 'quantfortune@gmail.com'
$env:SMTP_PASS = 'wkbk mwbi aiqo yvwl'
$env:SMTP_FROM_NAME = 'Quantfortune Support'
$env:RESET_OTP_SECRET = 'change_this_secret'

# Start the ASGI server on all interfaces, port 8000.
.\venv\Scripts\uvicorn.exe app.main:app --host 0.0.0.0 --port 8000

4
uvicorn.err Normal file
View File

@ -0,0 +1,4 @@
INFO: Started server process [5344]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit)

1
uvicorn.log Normal file
View File

@ -0,0 +1 @@
INFO: 127.0.0.1:60429 - "GET /api/me HTTP/1.1" 401 Unauthorized