Only load projects with activity in the last 7 days

This commit is contained in:
2025-11-10 10:52:57 -08:00
parent e04346a0f2
commit 5524d7308c
4 changed files with 81 additions and 66 deletions

View File

@@ -19,7 +19,7 @@ class FilevineClient:
"x-fv-orgid": str(FV_ORG_ID),
"x-fv-userid": str(FV_USER_ID),
}
def get_bearer_token(self) -> str:
"""Get a new bearer token using Filevine credentials"""
url = "https://identity.filevine.com/connect/token"
@@ -41,16 +41,21 @@ class FilevineClient:
self.bearer_token = token
self.headers["Authorization"] = f"Bearer {token}"
return token
def list_all_projects(self) -> List[Dict[str, Any]]:
"""Fetch all projects from Filevine API"""
def list_all_projects(self, latest_activity_since: Optional[str] = None) -> List[Dict[str, Any]]:
"""Fetch all projects from Filevine API, optionally filtered by latest activity date.
Args:
latest_activity_since: Optional date string in mm/dd/yyyy, mm-dd-yyyy, or yyyy-mm-dd format.
Only projects with activity since this date will be returned.
"""
base = f"{self.base_url}/Projects?limit=500"
results = []
last_count = None
tries = 0
offset = 0
cnt = 0
while True:
cnt = len(results)
print(f"list try {tries}, starting at {offset}, previous count {last_count}, currently at {cnt}")
@@ -60,6 +65,11 @@ class FilevineClient:
if last_count is not None:
offset = offset + last_count
params["offset"] = offset
# Add latestActivitySince filter if provided
if latest_activity_since:
params["latestActivitySince"] = latest_activity_since
r = requests.get(url, headers=self.headers, params=params, timeout=30)
r.raise_for_status()
page = r.json()
@@ -73,42 +83,42 @@ class FilevineClient:
if tries > 200:
break
return results
def fetch_project_detail(self, project_id_native: int) -> Dict[str, Any]:
    """Return the full detail record for a single Filevine project.

    Args:
        project_id_native: Native (numeric) Filevine project identifier.

    Returns:
        The decoded JSON body of the project detail response.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_project_team(self, project_id_native: int) -> List[Dict[str, Any]]:
    """Return the team members assigned to a Filevine project.

    Args:
        project_id_native: Native (numeric) Filevine project identifier.

    Returns:
        The 'items' list from the response, or an empty list when the
        payload has no (or a falsy) 'items' entry.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}/team?limit=1000"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    payload = response.json()
    return payload.get('items') or []
def fetch_project_tasks(self, project_id_native: int) -> Dict[str, Any]:
    """Return the raw tasks payload for a Filevine project.

    Args:
        project_id_native: Native (numeric) Filevine project identifier.

    Returns:
        The decoded JSON body (callers read its 'items' key downstream).

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}/tasks"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_client(self, client_id_native: int) -> Dict[str, Any]:
    """Return the contact record for a client by its native ID.

    Args:
        client_id_native: Native (numeric) Filevine contact identifier.

    Returns:
        The decoded JSON contact record.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    endpoint = f"{self.base_url}/contacts/{client_id_native}"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_contacts(self, project_id_native: int) -> Optional[List[Dict[str, Any]]]:
    """Return the contacts attached to a Filevine project.

    Args:
        project_id_native: Native (numeric) Filevine project identifier.

    Returns:
        The 'items' list from the response, or None when the payload
        carries no 'items' key (callers must handle the None case).

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    endpoint = f"{self.base_url}/projects/{project_id_native}/contacts"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    payload = response.json()
    return payload.get("items")
def fetch_form(self, project_id_native: int, form: str) -> Dict[str, Any]:
"""Fetch a specific form for a project"""
try:
@@ -119,7 +129,7 @@ class FilevineClient:
except Exception as e:
print(e)
return {}
def fetch_collection(self, project_id_native: int, collection: str) -> List[Dict[str, Any]]:
"""Fetch a collection for a project"""
try:

View File

@@ -179,7 +179,7 @@ class ProjectModel:
"service_attempt_date_1": self.service_attempt_date_1,
"contacts": self.contacts,
"ProjectEmailAddress": self.project_email_address,
"Number": self.number,
"number": self.number,
"IncidentDate": self.incident_date,
"ProjectId": self.project_id,
"ProjectName": self.project_name,
@@ -247,4 +247,4 @@ class ProjectModel:
project_url=data.get("ProjectUrl", ""),
property_contacts=data.get("property_contacts", {}),
viewing_emails=data.get("viewing_emails", [])
)
)

107
sync.py
View File

@@ -19,26 +19,26 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
def convert_to_pacific_time(date_str):
"""Convert UTC date string to Pacific Time and format as YYYY-MM-DD.
Args:
date_str (str): UTC date string in ISO 8601 format (e.g., "2025-10-24T19:20:22.377Z")
Returns:
str: Date formatted as YYYY-MM-DD in Pacific Time, or empty string if input is empty
"""
if not date_str:
return ''
try:
# Parse the UTC datetime
utc_time = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
# Set timezone to UTC
utc_time = utc_time.replace(tzinfo=pytz.UTC)
# Convert to Pacific Time
pacific_time = utc_time.astimezone(pytz.timezone('America/Los_Angeles'))
# Format as YYYY-MM-DD
return pacific_time.strftime('%Y-%m-%d')
except (ValueError, AttributeError) as e:
@@ -70,15 +70,15 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
"""
# Set the FilevineClient for this thread
set_filevine_client(client)
p = project_data
pid = (p.get("projectId") or {}).get("native")
print(f"Working on {pid} ({index}/{total})")
client = get_filevine_client()
if pid is None:
return {}
try:
c = client.fetch_client((p.get("clientId") or {}).get("native"))
cs = client.fetch_contacts(pid)
@@ -86,9 +86,9 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
except Exception as e:
print(f"[WARN] Failed to fetch essential data for {pid}: {e}")
return {}
defendant_one = next((c.get('orgContact', {}) for c in cs if "Defendant" in c.get('orgContact', {}).get('personTypes', [])), {})
try:
new_file_review = client.fetch_form(pid, "newFileReview") or {}
dates_and_deadlines = client.fetch_form(pid, "datesAndDeadlines") or {}
@@ -98,7 +98,7 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
fees_and_costs = client.fetch_form(pid, "feesAndCosts") or {}
property_contacts = client.fetch_form(pid, "propertyContacts") or {}
lease_info_np = client.fetch_form(pid, "leaseInfoNP") or {}
tasks_result = client.fetch_project_tasks(pid)
completed_tasks = [{"description": x.get("body"),
"completed": convert_to_pacific_time(x.get("completedDate"))}
@@ -108,73 +108,73 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
"completed": convert_to_pacific_time(x.get("completedDate"))}
for x in tasks_result.get("items", [])
if not x.get("isCompleted")]
team = client.fetch_project_team(pid)
assigned_attorney = next((m.get('fullname')
for m in team
assigned_attorney = next((m.get('fullname')
for m in team
if ('Assigned Attorney' in [r.get('name') for r in m.get('teamOrgRoles')])
), '')
primary_contact = next((m.get('fullname')
for m in team
primary_contact = next((m.get('fullname')
for m in team
if ('Primary' in [r.get('name') for r in m.get('teamOrgRoles')])
), '')
secondary_paralegal = next((m.get('fullname')
for m in team
secondary_paralegal = next((m.get('fullname')
for m in team
if ('Secondary Paralegal' in [r.get('name') for r in m.get('teamOrgRoles')])
), '')
# Extract notice service and expiration dates
notice_service_date = convert_to_pacific_time(new_file_review.get("noticeServiceDate")) or ''
notice_expiration_date = convert_to_pacific_time(new_file_review.get("noticeExpirationDate")) or ''
# Extract daily rent damages
daily_rent_damages = lease_info_np.get("dailyRentDamages") or dates_and_deadlines.get("dailyRentDamages") or ''
# Extract default date
default_date = convert_to_pacific_time(dates_and_deadlines.get("defaultDate")) or ''
case_filed_date = convert_to_pacific_time(dates_and_deadlines.get("dateCaseFiled")) or ''
# Extract motion hearing dates
demurrer_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("demurrerHearingDate")) or ''
motion_to_strike_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTSHearingDate")) or ''
motion_to_quash_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTQHearingDate")) or ''
other_motion_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("otherMotion1HearingDate")) or ''
# Extract MSC details
msc_date = convert_to_pacific_time(dates_and_deadlines.get("mSCDate")) or ''
msc_time = dates_and_deadlines.get("mSCTime") or '' # Time field, not converting
msc_address = dates_and_deadlines.get("mSCAddress") or ''
msc_div_dept_room = dates_and_deadlines.get("mSCDeptDiv") or ''
# Extract trial details
trial_date = convert_to_pacific_time(dates_and_deadlines.get("trialDate")) or ''
trial_time = dates_and_deadlines.get("trialTime") or '' # Time field, not converting
trial_address = dates_and_deadlines.get("trialAddress") or ''
trial_div_dept_room = dates_and_deadlines.get("trialDeptDivRoom") or ''
# Extract final result of trial/MSC
final_result = dates_and_deadlines.get("finalResultOfTrialMSCCa") or ''
# Extract settlement details
date_of_settlement = convert_to_pacific_time(dates_and_deadlines.get("dateOfStipulation")) or ''
final_obligation = dates_and_deadlines.get("finalObligationUnderTheStip") or ''
def_comply_stip = dates_and_deadlines.get("defendantsComplyWithStip") or ''
# Extract judgment and writ details
judgment_date = convert_to_pacific_time(dates_and_deadlines.get("dateOfJudgment")) or ''
writ_issued_date = convert_to_pacific_time(dates_and_deadlines.get("writIssuedDate")) or ''
# Extract lockout and stay details
scheduled_lockout = convert_to_pacific_time(dates_and_deadlines.get("sheriffScheduledDate")) or ''
oppose_stays = dates_and_deadlines.get("opposeStays") or ''
# Extract premises safety and entry code
premises_safety = new_file_review.get("lockoutSafetyIssuesOrSpecialCareIssues") or ''
matter_gate_code = property_info.get("propertyEntryCodeOrInstructions") or ''
# Extract possession recovered date
date_possession_recovered = convert_to_pacific_time(dates_and_deadlines.get("datePossessionRecovered")) or ''
# Extract attorney fees and costs
attorney_fees = fees_and_costs.get("totalAttorneysFees") or ''
costs = fees_and_costs.get("totalCosts") or ''
@@ -185,7 +185,7 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
valid_property_managers = [e.get('address').lower() for pm in property_managers if pm and pm.get('emails') for e in pm.get('emails') if e and e.get('address')]
pprint(valid_property_managers)
row = ProjectModel(
client=c.get("firstName", ""),
matter_description=p.get("projectName", ""),
@@ -245,17 +245,19 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
)
# Store the results in Firestore
from app import db # Import db from app
projects_ref = db.collection("projects")
from pprint import pprint
# pprint([p.get("number"), property_info, new_file_review])
# Add new projects
project_id = row.project_id
if project_id:
projects_ref.document(str(project_id)).set(row.to_dict())
print(f"Finished on {pid} ({index}/{total})")
print(f"Finished on {pid} Matter {row.number} ({index}/{total})")
return row.to_dict()
except Exception as e:
print(f"[ERROR] Processing failed for {pid}: {e}")
return {}
@@ -263,12 +265,12 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
def process_projects_parallel(projects: List[dict], client: FilevineClient, max_workers: int = 9) -> List[Dict[str, Any]]:
"""
Process projects in parallel using a worker pool.
Args:
projects: List of project data dictionaries
client: FilevineClient instance
max_workers: Number of concurrent workers (default 9)
Returns:
List of processed project dictionaries
"""
@@ -277,7 +279,7 @@ def process_projects_parallel(projects: List[dict], client: FilevineClient, max_
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers, initializer=worker_init, initargs=(client,)) as executor:
# Submit all tasks to the executor
future_to_project = {executor.submit(process_project, indx, total, project, client): project for indx, project in enumerate(projects)}
# Collect results as they complete
results = []
for future in concurrent.futures.as_completed(future_to_project):
@@ -288,7 +290,7 @@ def process_projects_parallel(projects: List[dict], client: FilevineClient, max_
print(f"[ERROR] Processing failed: {e}")
# Add empty dict or handle error appropriately
results.append({})
return results
def main():
@@ -298,18 +300,21 @@ def main():
# Initialize Filevine client
client = FilevineClient()
bearer = client.get_bearer_token()
# List projects (all pages)
projects = client.list_all_projects()
# List projects (all pages) with filter for projects updated in the last 7 days
from datetime import datetime, timedelta
seven_days_ago = (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d')
projects = client.list_all_projects(latest_activity_since=seven_days_ago)
#projects = [p for p in projects if (p.get("projectId") or {}).get("native") == 15914808]
#projects = projects[:10]
# Process projects in parallel
detailed_rows = process_projects_parallel(projects, client, 9)
print(f"Successfully synced {len(detailed_rows)} projects to Firestore")
except Exception as e:
print(f"Error during sync: {e}")
import traceback
@@ -317,4 +322,4 @@ def main():
sys.exit(1)
if __name__ == "__main__":
main()
main()

View File

@@ -136,7 +136,7 @@
<tbody class="bg-slate-100 divide-y divide-slate-300">
{% for r in rows %}
<tr class="hover:bg-slate-200 transition-colors duration-150 ease-in-out">
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Num')}"></td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Num')}">{{ r.number }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Client / Property')}">{{ r.client }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Description')}">{{ r.matter_description }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Defendant 1')}">{{ r.defendant_1 }}</td>