separates syncing
This commit is contained in:
97
app.py
97
app.py
@@ -39,7 +39,7 @@ if not all([FV_CLIENT_ID, FV_CLIENT_SECRET, FV_PAT, FV_ORG_ID, FV_USER_ID]):
|
|||||||
print("[WARN] Missing one or more Filevine env vars — dashboard will fail until set.")
|
print("[WARN] Missing one or more Filevine env vars — dashboard will fail until set.")
|
||||||
|
|
||||||
# --- Cache ---
|
# --- Cache ---
|
||||||
from cache import project_cache
|
# No longer using cache - projects are stored in Firestore
|
||||||
|
|
||||||
PHASES = {
|
PHASES = {
|
||||||
209436: "Nonpayment File Review",
|
209436: "Nonpayment File Review",
|
||||||
@@ -93,7 +93,7 @@ def get_user_profile(uid: str):
|
|||||||
|
|
||||||
|
|
||||||
def fetch_all_projects():
|
def fetch_all_projects():
|
||||||
"""Fetch all projects for a user and cache them"""
|
"""Fetch all projects for a user and store them in Firestore"""
|
||||||
|
|
||||||
print("Fetching projects....")
|
print("Fetching projects....")
|
||||||
# Get bearer token
|
# Get bearer token
|
||||||
@@ -101,17 +101,14 @@ def fetch_all_projects():
|
|||||||
|
|
||||||
# List projects (all pages)
|
# List projects (all pages)
|
||||||
projects = list_all_projects(bearer)
|
projects = list_all_projects(bearer)
|
||||||
# todo, only 10 projects
|
projects = projects[:250]
|
||||||
projects = projects[:50]
|
|
||||||
|
|
||||||
# Fetch details for each
|
# Fetch details for each
|
||||||
detailed_rows = []
|
detailed_rows = []
|
||||||
for p in projects:
|
for p in projects:
|
||||||
pid = (p.get("projectId") or {}).get("native")
|
pid = (p.get("projectId") or {}).get("native")
|
||||||
c = fetch_client(bearer, (p.get("clientId") or {}).get("native"))
|
c = fetch_client(bearer, (p.get("clientId") or {}).get("native"))
|
||||||
print("fetched client")
|
|
||||||
cs = fetch_contacts(bearer, pid)
|
cs = fetch_contacts(bearer, pid)
|
||||||
print("fetched contacts")
|
|
||||||
|
|
||||||
if pid is None:
|
if pid is None:
|
||||||
continue
|
continue
|
||||||
@@ -151,7 +148,6 @@ def fetch_all_projects():
|
|||||||
motion_to_quash_hearing_date = dates_and_deadlines.get("mTQHearingDate") or ''
|
motion_to_quash_hearing_date = dates_and_deadlines.get("mTQHearingDate") or ''
|
||||||
other_motion_hearing_date = dates_and_deadlines.get("otherMotion1HearingDate") or ''
|
other_motion_hearing_date = dates_and_deadlines.get("otherMotion1HearingDate") or ''
|
||||||
|
|
||||||
pprint(dates_and_deadlines)
|
|
||||||
# Extract MSC details
|
# Extract MSC details
|
||||||
msc_date = dates_and_deadlines.get("mSCDate") or ''
|
msc_date = dates_and_deadlines.get("mSCDate") or ''
|
||||||
msc_time = dates_and_deadlines.get("mSCTime") or ''
|
msc_time = dates_and_deadlines.get("mSCTime") or ''
|
||||||
@@ -248,36 +244,23 @@ def fetch_all_projects():
|
|||||||
}
|
}
|
||||||
detailed_rows.append(row)
|
detailed_rows.append(row)
|
||||||
|
|
||||||
# Cache the results
|
# Store the results in Firestore
|
||||||
project_cache.set_projects(detailed_rows)
|
projects_ref = db.collection("projects")
|
||||||
|
# Clear existing projects
|
||||||
|
projects_ref.stream()
|
||||||
|
for doc in projects_ref.stream():
|
||||||
|
doc.reference.delete()
|
||||||
|
|
||||||
|
# Add new projects
|
||||||
|
for row in detailed_rows:
|
||||||
|
project_id = str(row.get("ProjectId"))
|
||||||
|
if project_id:
|
||||||
|
projects_ref.document(project_id).set(row)
|
||||||
|
|
||||||
|
print(f"Stored {len(detailed_rows)} projects in Firestore")
|
||||||
return detailed_rows
|
return detailed_rows
|
||||||
|
|
||||||
import time
|
# No longer using cache - projects are stored in Firestore
|
||||||
import threading
|
|
||||||
|
|
||||||
def async_cache_projects():
    """Start a background daemon thread that refreshes the project cache
    every 15 minutes by calling fetch_all_projects().

    Idempotent: repeated calls (e.g. one per login via session_login) do
    not spawn additional polling threads — without this guard every login
    would leak another永-running 15-minute poller.
    """
    from threading import Thread

    # Start-once guard kept as a function attribute so no module-level
    # state needs to be added outside this block.
    if getattr(async_cache_projects, "_started", False):
        return
    async_cache_projects._started = True

    def cache_loop():
        # Runs forever on a daemon thread; one refresh per 15 minutes.
        while True:
            try:
                # NOTE(review): is_updating()/set_updating() is not an
                # atomic test-and-set, so two racing updaters could both
                # pass this check — TODO confirm project_cache's API.
                if project_cache.is_updating():
                    print("Cache update already in progress, skipping this cycle")
                else:
                    project_cache.set_updating(True)
                    try:
                        fetch_all_projects()
                    finally:
                        # Always clear the flag, even when the fetch
                        # raises, so one failure cannot wedge all future
                        # refresh cycles.
                        project_cache.set_updating(False)
            except Exception as e:
                print(f"Error in cache loop: {e}")

            # Wait for 15 minutes before next update
            time.sleep(15 * 60)  # 15 minutes in seconds

    thread = Thread(target=cache_loop, daemon=True)
    thread.start()
|
|
||||||
|
|
||||||
# --- Routes ---
|
# --- Routes ---
|
||||||
@app.route("/")
|
@app.route("/")
|
||||||
@@ -317,8 +300,6 @@ def session_login():
|
|||||||
# Optional: short session
|
# Optional: short session
|
||||||
session["expires_at"] = (datetime.utcnow() + timedelta(hours=8)).isoformat()
|
session["expires_at"] = (datetime.utcnow() + timedelta(hours=8)).isoformat()
|
||||||
|
|
||||||
async_cache_projects()
|
|
||||||
|
|
||||||
return jsonify({"ok": True})
|
return jsonify({"ok": True})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[ERR] session_login:", e)
|
print("[ERR] session_login:", e)
|
||||||
@@ -368,25 +349,32 @@ def list_all_projects(bearer: str):
|
|||||||
"x-fv-userid": str(FV_USER_ID),
|
"x-fv-userid": str(FV_USER_ID),
|
||||||
}
|
}
|
||||||
results = []
|
results = []
|
||||||
last_id = None
|
last_count = None
|
||||||
tries = 0
|
tries = 0
|
||||||
|
offset = 0
|
||||||
# TODO: we probably need to sync the data with Firebase
|
# TODO: we probably need to sync the data with Firebase
|
||||||
while len(results) < 200:
|
cnt = 0
|
||||||
|
while len(results) < 250:
|
||||||
cnt = len(results)
|
cnt = len(results)
|
||||||
print(f"list try {tries}, last_id {last_id}, count {cnt}")
|
print(f"list try {tries}, starting at {offset}, previous count {last_count}, currently at {cnt}")
|
||||||
tries += 1
|
tries += 1
|
||||||
url = base
|
url = base
|
||||||
params = {}
|
params = {}
|
||||||
if last_id is not None:
|
if last_count is not None:
|
||||||
# Some deployments use LastID/Offset pagination; adapt if needed
|
# Some deployments use LastID/Offset pagination; adapt if needed
|
||||||
params["lastID"] = last_id
|
offset = offset + last_count
|
||||||
|
print(f"OFFSET f{offset}")
|
||||||
|
params["offset"] = offset
|
||||||
r = requests.get(url, headers=headers, params=params, timeout=30)
|
r = requests.get(url, headers=headers, params=params, timeout=30)
|
||||||
r.raise_for_status()
|
r.raise_for_status()
|
||||||
page = r.json()
|
page = r.json()
|
||||||
|
from pprint import pprint
|
||||||
items = page.get("items", [])
|
items = page.get("items", [])
|
||||||
results.extend(items)
|
results.extend(items)
|
||||||
has_more = page.get("hasMore")
|
has_more = page.get("hasMore")
|
||||||
last_id = page.get("lastID")
|
last_count = page.get("count")
|
||||||
|
from pprint import pprint
|
||||||
|
pprint(page)
|
||||||
if not has_more:
|
if not has_more:
|
||||||
break
|
break
|
||||||
# Safety valve
|
# Safety valve
|
||||||
@@ -494,23 +482,20 @@ def dashboard():
|
|||||||
if not case_email:
|
if not case_email:
|
||||||
return redirect(url_for("welcome"))
|
return redirect(url_for("welcome"))
|
||||||
|
|
||||||
# Check cache first
|
# Read projects directly from Firestore
|
||||||
cached_projects = project_cache.get_projects()
|
projects_ref = db.collection("projects")
|
||||||
if cached_projects is not None:
|
docs = projects_ref.stream()
|
||||||
detailed_rows = cached_projects
|
detailed_rows = []
|
||||||
print("USING CACHE")
|
|
||||||
else:
|
for doc in docs:
|
||||||
# Fetch and cache projects
|
detailed_rows.append(doc.to_dict())
|
||||||
detailed_rows = fetch_all_projects()
|
|
||||||
print("FETCHING")
|
print(f"Retrieved {len(detailed_rows)} projects from Firestore")
|
||||||
|
|
||||||
print("HI", len(detailed_rows))
|
# Render table
|
||||||
|
|
||||||
# 5) Render table
|
|
||||||
return render_template("dashboard.html", rows=detailed_rows, case_email=case_email)
|
return render_template("dashboard.html", rows=detailed_rows, case_email=case_email)
|
||||||
|
|
||||||
|
|
||||||
# GAE compatibility
|
# GAE compatibility
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
async_cache_projects()
|
|
||||||
app.run(debug=True, host="0.0.0.0", port=int(os.environ.get("PORT", "5004")))
|
app.run(debug=True, host="0.0.0.0", port=int(os.environ.get("PORT", "5004")))
|
||||||
|
|||||||
27
sync.py
Normal file
27
sync.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Sync script to fetch and store projects in Firestore
|
||||||
|
This can be run manually from the command line to update the projects collection
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Add the current directory to the Python path so we can import app
|
||||||
|
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
|
from app import fetch_all_projects
|
||||||
|
|
||||||
|
def main(fetch=None):
    """Fetch all projects and store them in Firestore.

    Args:
        fetch: Optional zero-argument callable returning the list of
            synced project rows. Defaults to ``app.fetch_all_projects``;
            injectable so the CLI can be exercised in tests.

    Exits the process with status 1 if the sync raises any exception
    (broad catch is deliberate: this is the top-level CLI boundary).
    """
    if fetch is None:
        # Default: the real Filevine -> Firestore sync from app.py.
        fetch = fetch_all_projects
    print("Starting project sync...")
    try:
        # Fetch all projects and store them in Firestore
        projects = fetch()
    except Exception as e:
        # Report on stderr and signal failure via the exit code so
        # cron/CI callers can detect a failed sync from clean stdout.
        print(f"Error during sync: {e}", file=sys.stderr)
        sys.exit(1)
    else:
        print(f"Successfully synced {len(projects)} projects to Firestore")


if __name__ == "__main__":
    main()
|
||||||
Reference in New Issue
Block a user