Only load projects with activity in the last 7 days

This commit is contained in:
2025-11-10 10:52:57 -08:00
parent e04346a0f2
commit 5524d7308c
4 changed files with 81 additions and 66 deletions

View File

@@ -19,7 +19,7 @@ class FilevineClient:
"x-fv-orgid": str(FV_ORG_ID), "x-fv-orgid": str(FV_ORG_ID),
"x-fv-userid": str(FV_USER_ID), "x-fv-userid": str(FV_USER_ID),
} }
def get_bearer_token(self) -> str: def get_bearer_token(self) -> str:
"""Get a new bearer token using Filevine credentials""" """Get a new bearer token using Filevine credentials"""
url = "https://identity.filevine.com/connect/token" url = "https://identity.filevine.com/connect/token"
@@ -41,16 +41,21 @@ class FilevineClient:
self.bearer_token = token self.bearer_token = token
self.headers["Authorization"] = f"Bearer {token}" self.headers["Authorization"] = f"Bearer {token}"
return token return token
def list_all_projects(self) -> List[Dict[str, Any]]: def list_all_projects(self, latest_activity_since: Optional[str] = None) -> List[Dict[str, Any]]:
"""Fetch all projects from Filevine API""" """Fetch all projects from Filevine API, optionally filtered by latest activity date.
Args:
latest_activity_since: Optional date string in mm/dd/yyyy, mm-dd-yyyy, or yyyy-mm-dd format.
Only projects with activity since this date will be returned.
"""
base = f"{self.base_url}/Projects?limit=500" base = f"{self.base_url}/Projects?limit=500"
results = [] results = []
last_count = None last_count = None
tries = 0 tries = 0
offset = 0 offset = 0
cnt = 0 cnt = 0
while True: while True:
cnt = len(results) cnt = len(results)
print(f"list try {tries}, starting at {offset}, previous count {last_count}, currently at {cnt}") print(f"list try {tries}, starting at {offset}, previous count {last_count}, currently at {cnt}")
@@ -60,6 +65,11 @@ class FilevineClient:
if last_count is not None: if last_count is not None:
offset = offset + last_count offset = offset + last_count
params["offset"] = offset params["offset"] = offset
# Add latestActivitySince filter if provided
if latest_activity_since:
params["latestActivitySince"] = latest_activity_since
r = requests.get(url, headers=self.headers, params=params, timeout=30) r = requests.get(url, headers=self.headers, params=params, timeout=30)
r.raise_for_status() r.raise_for_status()
page = r.json() page = r.json()
@@ -73,42 +83,42 @@ class FilevineClient:
if tries > 200: if tries > 200:
break break
return results return results
def fetch_project_detail(self, project_id_native: int) -> Dict[str, Any]:
    """Return the full detail record for a single project.

    Args:
        project_id_native: Native Filevine project identifier.

    Returns:
        The project detail payload decoded from JSON.

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_project_team(self, project_id_native: int) -> List[Dict[str, Any]]:
    """Return the team members assigned to a single project.

    Args:
        project_id_native: Native Filevine project identifier.

    Returns:
        The entries of the response's 'items' field, or an empty list
        when that field is absent or null.

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}/team?limit=1000"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    # Normalize a missing/null 'items' field to an empty list for callers.
    return response.json().get('items') or []
def fetch_project_tasks(self, project_id_native: int) -> Dict[str, Any]:
    """Return the raw tasks payload for a single project.

    Args:
        project_id_native: Native Filevine project identifier.

    Returns:
        The tasks response decoded from JSON (callers read its 'items' key).

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    endpoint = f"{self.base_url}/Projects/{project_id_native}/tasks"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_client(self, client_id_native: int) -> Dict[str, Any]:
    """Return the contact record for a client by native contact id.

    Args:
        client_id_native: Native Filevine contact identifier of the client.

    Returns:
        The contact payload decoded from JSON.

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    endpoint = f"{self.base_url}/contacts/{client_id_native}"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    return response.json()
def fetch_contacts(self, project_id_native: int) -> Optional[List[Dict[str, Any]]]:
    """Return the contacts attached to a single project.

    Args:
        project_id_native: Native Filevine project identifier.

    Returns:
        The response's 'items' list, or None when the field is absent.

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    endpoint = f"{self.base_url}/projects/{project_id_native}/contacts"
    response = requests.get(endpoint, headers=self.headers, timeout=30)
    response.raise_for_status()
    # Unlike fetch_project_team, a missing 'items' field propagates as None.
    return response.json().get("items")
def fetch_form(self, project_id_native: int, form: str) -> Dict[str, Any]: def fetch_form(self, project_id_native: int, form: str) -> Dict[str, Any]:
"""Fetch a specific form for a project""" """Fetch a specific form for a project"""
try: try:
@@ -119,7 +129,7 @@ class FilevineClient:
except Exception as e: except Exception as e:
print(e) print(e)
return {} return {}
def fetch_collection(self, project_id_native: int, collection: str) -> List[Dict[str, Any]]: def fetch_collection(self, project_id_native: int, collection: str) -> List[Dict[str, Any]]:
"""Fetch a collection for a project""" """Fetch a collection for a project"""
try: try:

View File

@@ -179,7 +179,7 @@ class ProjectModel:
"service_attempt_date_1": self.service_attempt_date_1, "service_attempt_date_1": self.service_attempt_date_1,
"contacts": self.contacts, "contacts": self.contacts,
"ProjectEmailAddress": self.project_email_address, "ProjectEmailAddress": self.project_email_address,
"Number": self.number, "number": self.number,
"IncidentDate": self.incident_date, "IncidentDate": self.incident_date,
"ProjectId": self.project_id, "ProjectId": self.project_id,
"ProjectName": self.project_name, "ProjectName": self.project_name,
@@ -247,4 +247,4 @@ class ProjectModel:
project_url=data.get("ProjectUrl", ""), project_url=data.get("ProjectUrl", ""),
property_contacts=data.get("property_contacts", {}), property_contacts=data.get("property_contacts", {}),
viewing_emails=data.get("viewing_emails", []) viewing_emails=data.get("viewing_emails", [])
) )

107
sync.py
View File

@@ -19,26 +19,26 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)))
def convert_to_pacific_time(date_str): def convert_to_pacific_time(date_str):
"""Convert UTC date string to Pacific Time and format as YYYY-MM-DD. """Convert UTC date string to Pacific Time and format as YYYY-MM-DD.
Args: Args:
date_str (str): UTC date string in ISO 8601 format (e.g., "2025-10-24T19:20:22.377Z") date_str (str): UTC date string in ISO 8601 format (e.g., "2025-10-24T19:20:22.377Z")
Returns: Returns:
str: Date formatted as YYYY-MM-DD in Pacific Time, or empty string if input is empty str: Date formatted as YYYY-MM-DD in Pacific Time, or empty string if input is empty
""" """
if not date_str: if not date_str:
return '' return ''
try: try:
# Parse the UTC datetime # Parse the UTC datetime
utc_time = datetime.fromisoformat(date_str.replace('Z', '+00:00')) utc_time = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
# Set timezone to UTC # Set timezone to UTC
utc_time = utc_time.replace(tzinfo=pytz.UTC) utc_time = utc_time.replace(tzinfo=pytz.UTC)
# Convert to Pacific Time # Convert to Pacific Time
pacific_time = utc_time.astimezone(pytz.timezone('America/Los_Angeles')) pacific_time = utc_time.astimezone(pytz.timezone('America/Los_Angeles'))
# Format as YYYY-MM-DD # Format as YYYY-MM-DD
return pacific_time.strftime('%Y-%m-%d') return pacific_time.strftime('%Y-%m-%d')
except (ValueError, AttributeError) as e: except (ValueError, AttributeError) as e:
@@ -70,15 +70,15 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
""" """
# Set the FilevineClient for this thread # Set the FilevineClient for this thread
set_filevine_client(client) set_filevine_client(client)
p = project_data p = project_data
pid = (p.get("projectId") or {}).get("native") pid = (p.get("projectId") or {}).get("native")
print(f"Working on {pid} ({index}/{total})") print(f"Working on {pid} ({index}/{total})")
client = get_filevine_client() client = get_filevine_client()
if pid is None: if pid is None:
return {} return {}
try: try:
c = client.fetch_client((p.get("clientId") or {}).get("native")) c = client.fetch_client((p.get("clientId") or {}).get("native"))
cs = client.fetch_contacts(pid) cs = client.fetch_contacts(pid)
@@ -86,9 +86,9 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
except Exception as e: except Exception as e:
print(f"[WARN] Failed to fetch essential data for {pid}: {e}") print(f"[WARN] Failed to fetch essential data for {pid}: {e}")
return {} return {}
defendant_one = next((c.get('orgContact', {}) for c in cs if "Defendant" in c.get('orgContact', {}).get('personTypes', [])), {}) defendant_one = next((c.get('orgContact', {}) for c in cs if "Defendant" in c.get('orgContact', {}).get('personTypes', [])), {})
try: try:
new_file_review = client.fetch_form(pid, "newFileReview") or {} new_file_review = client.fetch_form(pid, "newFileReview") or {}
dates_and_deadlines = client.fetch_form(pid, "datesAndDeadlines") or {} dates_and_deadlines = client.fetch_form(pid, "datesAndDeadlines") or {}
@@ -98,7 +98,7 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
fees_and_costs = client.fetch_form(pid, "feesAndCosts") or {} fees_and_costs = client.fetch_form(pid, "feesAndCosts") or {}
property_contacts = client.fetch_form(pid, "propertyContacts") or {} property_contacts = client.fetch_form(pid, "propertyContacts") or {}
lease_info_np = client.fetch_form(pid, "leaseInfoNP") or {} lease_info_np = client.fetch_form(pid, "leaseInfoNP") or {}
tasks_result = client.fetch_project_tasks(pid) tasks_result = client.fetch_project_tasks(pid)
completed_tasks = [{"description": x.get("body"), completed_tasks = [{"description": x.get("body"),
"completed": convert_to_pacific_time(x.get("completedDate"))} "completed": convert_to_pacific_time(x.get("completedDate"))}
@@ -108,73 +108,73 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
"completed": convert_to_pacific_time(x.get("completedDate"))} "completed": convert_to_pacific_time(x.get("completedDate"))}
for x in tasks_result.get("items", []) for x in tasks_result.get("items", [])
if not x.get("isCompleted")] if not x.get("isCompleted")]
team = client.fetch_project_team(pid) team = client.fetch_project_team(pid)
assigned_attorney = next((m.get('fullname') assigned_attorney = next((m.get('fullname')
for m in team for m in team
if ('Assigned Attorney' in [r.get('name') for r in m.get('teamOrgRoles')]) if ('Assigned Attorney' in [r.get('name') for r in m.get('teamOrgRoles')])
), '') ), '')
primary_contact = next((m.get('fullname') primary_contact = next((m.get('fullname')
for m in team for m in team
if ('Primary' in [r.get('name') for r in m.get('teamOrgRoles')]) if ('Primary' in [r.get('name') for r in m.get('teamOrgRoles')])
), '') ), '')
secondary_paralegal = next((m.get('fullname') secondary_paralegal = next((m.get('fullname')
for m in team for m in team
if ('Secondary Paralegal' in [r.get('name') for r in m.get('teamOrgRoles')]) if ('Secondary Paralegal' in [r.get('name') for r in m.get('teamOrgRoles')])
), '') ), '')
# Extract notice service and expiration dates # Extract notice service and expiration dates
notice_service_date = convert_to_pacific_time(new_file_review.get("noticeServiceDate")) or '' notice_service_date = convert_to_pacific_time(new_file_review.get("noticeServiceDate")) or ''
notice_expiration_date = convert_to_pacific_time(new_file_review.get("noticeExpirationDate")) or '' notice_expiration_date = convert_to_pacific_time(new_file_review.get("noticeExpirationDate")) or ''
# Extract daily rent damages # Extract daily rent damages
daily_rent_damages = lease_info_np.get("dailyRentDamages") or dates_and_deadlines.get("dailyRentDamages") or '' daily_rent_damages = lease_info_np.get("dailyRentDamages") or dates_and_deadlines.get("dailyRentDamages") or ''
# Extract default date # Extract default date
default_date = convert_to_pacific_time(dates_and_deadlines.get("defaultDate")) or '' default_date = convert_to_pacific_time(dates_and_deadlines.get("defaultDate")) or ''
case_filed_date = convert_to_pacific_time(dates_and_deadlines.get("dateCaseFiled")) or '' case_filed_date = convert_to_pacific_time(dates_and_deadlines.get("dateCaseFiled")) or ''
# Extract motion hearing dates # Extract motion hearing dates
demurrer_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("demurrerHearingDate")) or '' demurrer_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("demurrerHearingDate")) or ''
motion_to_strike_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTSHearingDate")) or '' motion_to_strike_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTSHearingDate")) or ''
motion_to_quash_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTQHearingDate")) or '' motion_to_quash_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("mTQHearingDate")) or ''
other_motion_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("otherMotion1HearingDate")) or '' other_motion_hearing_date = convert_to_pacific_time(dates_and_deadlines.get("otherMotion1HearingDate")) or ''
# Extract MSC details # Extract MSC details
msc_date = convert_to_pacific_time(dates_and_deadlines.get("mSCDate")) or '' msc_date = convert_to_pacific_time(dates_and_deadlines.get("mSCDate")) or ''
msc_time = dates_and_deadlines.get("mSCTime") or '' # Time field, not converting msc_time = dates_and_deadlines.get("mSCTime") or '' # Time field, not converting
msc_address = dates_and_deadlines.get("mSCAddress") or '' msc_address = dates_and_deadlines.get("mSCAddress") or ''
msc_div_dept_room = dates_and_deadlines.get("mSCDeptDiv") or '' msc_div_dept_room = dates_and_deadlines.get("mSCDeptDiv") or ''
# Extract trial details # Extract trial details
trial_date = convert_to_pacific_time(dates_and_deadlines.get("trialDate")) or '' trial_date = convert_to_pacific_time(dates_and_deadlines.get("trialDate")) or ''
trial_time = dates_and_deadlines.get("trialTime") or '' # Time field, not converting trial_time = dates_and_deadlines.get("trialTime") or '' # Time field, not converting
trial_address = dates_and_deadlines.get("trialAddress") or '' trial_address = dates_and_deadlines.get("trialAddress") or ''
trial_div_dept_room = dates_and_deadlines.get("trialDeptDivRoom") or '' trial_div_dept_room = dates_and_deadlines.get("trialDeptDivRoom") or ''
# Extract final result of trial/MSC # Extract final result of trial/MSC
final_result = dates_and_deadlines.get("finalResultOfTrialMSCCa") or '' final_result = dates_and_deadlines.get("finalResultOfTrialMSCCa") or ''
# Extract settlement details # Extract settlement details
date_of_settlement = convert_to_pacific_time(dates_and_deadlines.get("dateOfStipulation")) or '' date_of_settlement = convert_to_pacific_time(dates_and_deadlines.get("dateOfStipulation")) or ''
final_obligation = dates_and_deadlines.get("finalObligationUnderTheStip") or '' final_obligation = dates_and_deadlines.get("finalObligationUnderTheStip") or ''
def_comply_stip = dates_and_deadlines.get("defendantsComplyWithStip") or '' def_comply_stip = dates_and_deadlines.get("defendantsComplyWithStip") or ''
# Extract judgment and writ details # Extract judgment and writ details
judgment_date = convert_to_pacific_time(dates_and_deadlines.get("dateOfJudgment")) or '' judgment_date = convert_to_pacific_time(dates_and_deadlines.get("dateOfJudgment")) or ''
writ_issued_date = convert_to_pacific_time(dates_and_deadlines.get("writIssuedDate")) or '' writ_issued_date = convert_to_pacific_time(dates_and_deadlines.get("writIssuedDate")) or ''
# Extract lockout and stay details # Extract lockout and stay details
scheduled_lockout = convert_to_pacific_time(dates_and_deadlines.get("sheriffScheduledDate")) or '' scheduled_lockout = convert_to_pacific_time(dates_and_deadlines.get("sheriffScheduledDate")) or ''
oppose_stays = dates_and_deadlines.get("opposeStays") or '' oppose_stays = dates_and_deadlines.get("opposeStays") or ''
# Extract premises safety and entry code # Extract premises safety and entry code
premises_safety = new_file_review.get("lockoutSafetyIssuesOrSpecialCareIssues") or '' premises_safety = new_file_review.get("lockoutSafetyIssuesOrSpecialCareIssues") or ''
matter_gate_code = property_info.get("propertyEntryCodeOrInstructions") or '' matter_gate_code = property_info.get("propertyEntryCodeOrInstructions") or ''
# Extract possession recovered date # Extract possession recovered date
date_possession_recovered = convert_to_pacific_time(dates_and_deadlines.get("datePossessionRecovered")) or '' date_possession_recovered = convert_to_pacific_time(dates_and_deadlines.get("datePossessionRecovered")) or ''
# Extract attorney fees and costs # Extract attorney fees and costs
attorney_fees = fees_and_costs.get("totalAttorneysFees") or '' attorney_fees = fees_and_costs.get("totalAttorneysFees") or ''
costs = fees_and_costs.get("totalCosts") or '' costs = fees_and_costs.get("totalCosts") or ''
@@ -185,7 +185,7 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
valid_property_managers = [e.get('address').lower() for pm in property_managers if pm and pm.get('emails') for e in pm.get('emails') if e and e.get('address')] valid_property_managers = [e.get('address').lower() for pm in property_managers if pm and pm.get('emails') for e in pm.get('emails') if e and e.get('address')]
pprint(valid_property_managers) pprint(valid_property_managers)
row = ProjectModel( row = ProjectModel(
client=c.get("firstName", ""), client=c.get("firstName", ""),
matter_description=p.get("projectName", ""), matter_description=p.get("projectName", ""),
@@ -245,17 +245,19 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
) )
# Store the results in Firestore # Store the results in Firestore
from app import db # Import db from app from app import db # Import db from app
projects_ref = db.collection("projects") projects_ref = db.collection("projects")
from pprint import pprint
# pprint([p.get("number"), property_info, new_file_review])
# Add new projects # Add new projects
project_id = row.project_id project_id = row.project_id
if project_id: if project_id:
projects_ref.document(str(project_id)).set(row.to_dict()) projects_ref.document(str(project_id)).set(row.to_dict())
print(f"Finished on {pid} ({index}/{total})") print(f"Finished on {pid} Matter {row.number} ({index}/{total})")
return row.to_dict() return row.to_dict()
except Exception as e: except Exception as e:
print(f"[ERROR] Processing failed for {pid}: {e}") print(f"[ERROR] Processing failed for {pid}: {e}")
return {} return {}
@@ -263,12 +265,12 @@ def process_project(index: int, total: int, project_data: dict, client: Filevine
def process_projects_parallel(projects: List[dict], client: FilevineClient, max_workers: int = 9) -> List[Dict[str, Any]]: def process_projects_parallel(projects: List[dict], client: FilevineClient, max_workers: int = 9) -> List[Dict[str, Any]]:
""" """
Process projects in parallel using a worker pool. Process projects in parallel using a worker pool.
Args: Args:
projects: List of project data dictionaries projects: List of project data dictionaries
client: FilevineClient instance client: FilevineClient instance
max_workers: Number of concurrent workers (default 9) max_workers: Number of concurrent workers (default 9)
Returns: Returns:
List of processed project dictionaries List of processed project dictionaries
""" """
@@ -277,7 +279,7 @@ def process_projects_parallel(projects: List[dict], client: FilevineClient, max_
with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers, initializer=worker_init, initargs=(client,)) as executor: with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers, initializer=worker_init, initargs=(client,)) as executor:
# Submit all tasks to the executor # Submit all tasks to the executor
future_to_project = {executor.submit(process_project, indx, total, project, client): project for indx, project in enumerate(projects)} future_to_project = {executor.submit(process_project, indx, total, project, client): project for indx, project in enumerate(projects)}
# Collect results as they complete # Collect results as they complete
results = [] results = []
for future in concurrent.futures.as_completed(future_to_project): for future in concurrent.futures.as_completed(future_to_project):
@@ -288,7 +290,7 @@ def process_projects_parallel(projects: List[dict], client: FilevineClient, max_
print(f"[ERROR] Processing failed: {e}") print(f"[ERROR] Processing failed: {e}")
# Add empty dict or handle error appropriately # Add empty dict or handle error appropriately
results.append({}) results.append({})
return results return results
def main(): def main():
@@ -298,18 +300,21 @@ def main():
# Initialize Filevine client # Initialize Filevine client
client = FilevineClient() client = FilevineClient()
bearer = client.get_bearer_token() bearer = client.get_bearer_token()
# List projects (all pages) # List projects (all pages) with filter for projects updated in the last 7 days
projects = client.list_all_projects() from datetime import datetime, timedelta
seven_days_ago = (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d')
projects = client.list_all_projects(latest_activity_since=seven_days_ago)
#projects = [p for p in projects if (p.get("projectId") or {}).get("native") == 15914808] #projects = [p for p in projects if (p.get("projectId") or {}).get("native") == 15914808]
#projects = projects[:10]
# Process projects in parallel # Process projects in parallel
detailed_rows = process_projects_parallel(projects, client, 9) detailed_rows = process_projects_parallel(projects, client, 9)
print(f"Successfully synced {len(detailed_rows)} projects to Firestore") print(f"Successfully synced {len(detailed_rows)} projects to Firestore")
except Exception as e: except Exception as e:
print(f"Error during sync: {e}") print(f"Error during sync: {e}")
import traceback import traceback
@@ -317,4 +322,4 @@ def main():
sys.exit(1) sys.exit(1)
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -136,7 +136,7 @@
<tbody class="bg-slate-100 divide-y divide-slate-300"> <tbody class="bg-slate-100 divide-y divide-slate-300">
{% for r in rows %} {% for r in rows %}
<tr class="hover:bg-slate-200 transition-colors duration-150 ease-in-out"> <tr class="hover:bg-slate-200 transition-colors duration-150 ease-in-out">
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Num')}"></td> <td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Num')}">{{ r.number }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Client / Property')}">{{ r.client }}</td> <td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Client / Property')}">{{ r.client }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Description')}">{{ r.matter_description }}</td> <td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Matter Description')}">{{ r.matter_description }}</td>
<td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Defendant 1')}">{{ r.defendant_1 }}</td> <td class="px-4 py-3 text-sm text-slate-800" :class="{'hidden': !isColumnVisible('Defendant 1')}">{{ r.defendant_1 }}</td>