2 changes: 1 addition & 1 deletion dashboard/docker-compose.yml
@@ -13,7 +13,7 @@ services:
- redis_data:/data
### PostgresQL
postgresql:
image: docker.io/postgres:latest
image: docker.io/postgres:17-bookworm
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=secret
82 changes: 74 additions & 8 deletions dashboard/src/t5gweb/cache.py
@@ -12,6 +12,7 @@
from jira.exceptions import JIRAError
from t5gweb import libtelco5g
from t5gweb.database import load_cases_postgres, load_jira_card_postgres
from t5gweb.database.operations import get_current_sprint_cards_from_postgres
from t5gweb.utils import format_comment, format_date, make_headers


@@ -62,12 +63,27 @@ def get_cases(cfg):
if "case_closedDate" in case:
cases[case["case_number"]]["closeddate"] = case["case_closedDate"]

# Load to Postgres first, then update Redis cache
postgres_success = False
try:
logging.warning("Loading cases to Postgres database")
load_cases_postgres(cases)
logging.warning("Successfully loaded cases to Postgres")
postgres_success = True
except Exception as e:
logging.error("Failed to load cases to Postgres: %s ", e)
logging.error("Failed to load cases to Postgres: %s. Skipping Redis cache update.", e)
# Don't update Redis if the Postgres load fails, so the cache stays consistent with the database
return

libtelco5g.redis_set("cases", json.dumps(cases))
# # Only update Redis if Postgres succeeded
# if postgres_success:
# try:
# logging.warning("Updating Redis cache with cases")
# libtelco5g.redis_set("cases", json.dumps(cases))
# logging.warning("Successfully updated Redis cache with cases")
# except Exception as e:
# logging.error("Failed to update Redis cache with cases: %s. Data is safely stored in Postgres.", e)
# # Postgres data is safe, Redis cache failure is not critical


def get_escalations(cfg, cases):
@@ -140,17 +156,28 @@ def get_cards(cfg, self=None, background=False):
)
if card_data:
jira_cards[card.key] = card_data
load_jira_card_postgres(cases, card_data["case_number"], card)
# Load to Postgres
try:
load_jira_card_postgres(cases, card_data["case_number"], card)
except Exception as postgres_error:
logging.error("Failed to load card %s to Postgres: %s", card.key, postgres_error)
# Remove the card from jira_cards if the Postgres load fails, to keep the cache consistent with Postgres
jira_cards.pop(card.key, None)
continue

except Exception as e:
logging.warning("Error processing card %s: %s", card, str(e))
continue

# Cache the results
libtelco5g.redis_set("cards", json.dumps(jira_cards))
libtelco5g.redis_set(
"timestamp", json.dumps(str(datetime.datetime.now(datetime.timezone.utc)))
)

# After successful card processing, refresh cache from Postgres to get current sprint data
try:
logging.warning("Refreshing Redis cache from Postgres to get current sprint data")
refresh_result = refresh_cache_from_postgres(cfg)
logging.warning("Cache refresh result: %s", refresh_result)
except Exception as e:
logging.error("Failed to refresh cache from Postgres: %s", e)

return {"cards cached": len(jira_cards)}


@@ -692,6 +719,45 @@ def _extract_private_keywords(bug):
return None


def refresh_cache_from_postgres(cfg):
"""Refresh Redis cache using data from Postgres database

This decouples cache updates from data loading: the cache is refreshed
from the database rather than directly from API calls.
"""
logging.warning("Refreshing Redis cache from Postgres database")

try:
# Get current sprint data from Postgres
sprint_data = get_current_sprint_cards_from_postgres(cfg)
cards = sprint_data["cards"]
cases = sprint_data["cases"]

# Update Redis cache with Postgres data
try:
logging.warning("Updating Redis cache with %d cards from Postgres", len(cards))
libtelco5g.redis_set("cards", json.dumps(cards))

logging.warning("Updating Redis cache with %d cases from Postgres", len(cases))
libtelco5g.redis_set("cases", json.dumps(cases))

# Update timestamp
libtelco5g.redis_set(
"timestamp", json.dumps(str(datetime.datetime.now(datetime.timezone.utc)))
)

logging.warning("Successfully refreshed Redis cache from Postgres")
return {"cards_cached": len(cards), "cases_cached": len(cases)}

except Exception as redis_error:
logging.error("Failed to update Redis cache: %s. Data is available in Postgres.", redis_error)
return {"error": "Redis update failed", "cards_available": len(cards), "cases_available": len(cases)}

except Exception as postgres_error:
logging.error("Failed to read data from Postgres: %s", postgres_error)
return {"error": "Postgres read failed"}


def get_stats():
logging.warning("caching {} stats")
all_stats = libtelco5g.redis_get("stats")
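For reviewers, a minimal sketch of the write path these cache.py changes establish: Postgres is treated as the source of truth, the Redis update is skipped entirely when the Postgres load fails, and the cache is afterwards refreshed from the database rather than from the API response. The helper names mirror the diff, but the stubbed bodies and the `sync_cases` wrapper are illustrative assumptions, not the actual implementation.

```python
import json
import logging

# Stubs standing in for the real helpers (assumption: the real implementations
# live in t5gweb.database and t5gweb.libtelco5g, as imported in the diff).
def load_cases_postgres(cases):
    """Persist the cases dict to Postgres (stubbed here)."""


def get_current_sprint_cards_from_postgres(cfg):
    """Read the current sprint's cards and linked cases back out of Postgres (stubbed)."""
    return {"cards": {}, "cases": {}}


def redis_set(key, value):
    """Write a string value to Redis (stubbed)."""


def sync_cases(cfg, cases):
    """Postgres-first write path: persist, then refresh the Redis cache from the database."""
    try:
        load_cases_postgres(cases)  # 1. the database is the source of truth
    except Exception as exc:
        logging.error("Failed to load cases to Postgres: %s", exc)
        return {"error": "postgres load failed"}  # 2. skip the Redis update on failure

    sprint = get_current_sprint_cards_from_postgres(cfg)  # 3. read back from Postgres
    redis_set("cards", json.dumps(sprint["cards"]))  # 4. only now refresh the cache
    redis_set("cases", json.dumps(sprint["cases"]))
    return {"cards_cached": len(sprint["cards"]), "cases_cached": len(sprint["cases"])}


if __name__ == "__main__":
    print(sync_cases({}, {"00000001": {"status": "Open"}}))
```

The ordering is the point: `redis_set` is never reached when the Postgres load raises, which is what the early `return` added to `get_cases` enforces.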
92 changes: 92 additions & 0 deletions dashboard/src/t5gweb/database/operations.py
@@ -6,6 +6,7 @@

from dateutil import parser
from t5gweb.utils import format_comment
from t5gweb import libtelco5g

from .models import Case, JiraCard, JiraComment
from .session import db_config
@@ -180,3 +181,94 @@ def load_jira_card_postgres(cases, case_number, issue):
session.close()

return card_processed, card_comments # Return both values


def get_current_sprint_cards_from_postgres(cfg):
"""Get JIRA cards from current sprint and their linked cases from Postgres"""
session = db_config.SessionLocal()
try:
# Get current sprint info from JIRA
jira_conn = libtelco5g.jira_connection(cfg)
board = libtelco5g.get_board_id(jira_conn, cfg["board"])

if cfg["sprintname"] and cfg["sprintname"] != "":
sprint = libtelco5g.get_latest_sprint(jira_conn, board.id, cfg["sprintname"])
sprint_name_pattern = f"%{sprint.name}%"

# Query cards from current sprint
sprint_cards = session.query(JiraCard).filter(
JiraCard.sprint.like(sprint_name_pattern)
).all()
else:
# If no sprint specified, get all cards
sprint_cards = session.query(JiraCard).all()

# Build cards dictionary
cards_dict = {}
cases_dict = {}

for card in sprint_cards:
# Get linked case
case = session.query(Case).filter_by(
case_number=card.case_number,
created_date=card.created_date
).first()

if case:
# Add case to cases dictionary
cases_dict[case.case_number] = {
"owner": case.owner,
"severity": f"{case.severity} (Urgent)" if case.severity == 1 else str(case.severity),
"account": case.account,
"problem": case.summary,
"status": case.status,
"createdate": case.created_date.isoformat(),
"last_update": case.last_update.isoformat() if case.last_update else None,
"description": case.description,
"product": case.product,
"product_version": case.product_version,
}

# Get comments for this card
comments = [(comment.body, comment.last_update_date.isoformat())
for comment in card.comments]

# Build card data matching Redis format
cards_dict[card.jira_card_id] = {
"case_number": card.case_number,
"summary": card.summary,
"priority": card.priority,
"card_status": libtelco5g.status_map.get(card.status, card.status),
"assignee": {"key": card.assignee, "name": card.assignee, "displayName": card.assignee} if card.assignee else {"displayName": None, "key": None, "name": None},
"severity": card.severity,
"comments": comments,
"card_created": card.created_date.isoformat(),
"last_update": card.last_update_date.isoformat() if card.last_update_date else None,
"account": case.account,
"description": case.description,
"product": case.product,
"case_status": case.status,
"tags": [], # Will need to be populated from case details if needed
"labels": [], # Will need to be populated from JIRA if needed
"bugzilla": None, # Will need separate query if needed
"issues": None, # Will need separate query if needed
"escalated": False, # Will need separate query if needed
"escalated_link": None,
"potential_escalation": False,
"crit_sit": False, # Will need separate query if needed
"group_name": None,
"case_updated_date": case.last_update.strftime("%Y-%m-%d %H:%M") if case.last_update else None,
"case_days_open": (datetime.now(timezone.utc).replace(tzinfo=None) - case.created_date).days,
"notified_users": [],
"relief_at": None,
"resolved_at": None,
"daily_telco": False,
"contributor": [],
}

return {"cards": cards_dict, "cases": cases_dict}

finally:
session.close()
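One detail worth calling out in get_current_sprint_cards_from_postgres is that every datetime coming off the ORM rows is converted with isoformat() (or strftime) before the dictionaries are returned, because datetime objects are not JSON-serializable and the result is later passed to json.dumps for Redis. A toy illustration, using a hypothetical FakeCard stand-in rather than the real JiraCard model:

```python
import json
from datetime import datetime, timezone


class FakeCard:
    """Hypothetical stand-in for a JiraCard ORM row (the real model has more columns)."""

    jira_card_id = "KNIECO-1234"
    case_number = "00000001"
    created_date = datetime(2024, 1, 2, 9, 30, tzinfo=timezone.utc)


card = FakeCard()

# Datetimes are converted to ISO strings up front so the resulting dict can be
# serialized with json.dumps and stored in Redis without a custom encoder.
cache_entry = {
    card.jira_card_id: {
        "case_number": card.case_number,
        "card_created": card.created_date.isoformat(),
        "case_days_open": (datetime.now(timezone.utc) - card.created_date).days,
    }
}
print(json.dumps(cache_entry, indent=2))

# json.dumps(card.created_date) by itself would raise
# "TypeError: Object of type datetime is not JSON serializable",
# which is why the conversion happens inside the query helper.
```

Note the sketch keeps everything timezone-aware, whereas the diff calls `.replace(tzinfo=None)` before subtracting `case.created_date`, which suggests the stored timestamps are naive; mixing aware and naive datetimes in a subtraction would raise a TypeError.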