"""
Pytest configuration and fixtures for cocalc-api tests.
"""
import json
import os
import time
import uuid
import pytest

from cocalc_api import Hub, Project

from psycopg2 import pool as pg_pool
from typing import Callable, TypeVar

# Database configuration examples (DRY principle)
PGHOST_SOCKET_EXAMPLE = "/path/to/cocalc-data/socket"
PGHOST_NETWORK_EXAMPLE = "localhost"

T = TypeVar('T')


def retry_with_backoff(
    func: Callable[[], T],
    max_retries: int = 3,
    retry_delay: int = 5,
    error_condition: Callable[[RuntimeError],
                              bool] = lambda e: any(keyword in str(e).lower() for keyword in ["timeout", "closed", "connection", "reset", "broken"]),
) -> T:
    """
    Retry a function call, with a fixed delay between attempts, on timeout and connection errors.

    This helper is useful for operations that may time out or fail on the first attempt due to
    cold starts (e.g., kernel launches) or transient connection issues.

    Args:
        func: Callable that performs the operation
        max_retries: Maximum number of attempts (default: 3)
        retry_delay: Delay in seconds between retries (default: 5)
        error_condition: Function that determines whether an error should trigger a retry.
            Defaults to checking for timeout/connection-related keywords.

    Returns:
        The result of the function call

    Raises:
        RuntimeError: Re-raised from the final failed attempt, or immediately if the error
            does not match error_condition
    """
    for attempt in range(max_retries):
        try:
            return func()
        except RuntimeError as e:
            error_msg = str(e).lower()
            is_retryable = error_condition(e)
            if is_retryable and attempt < max_retries - 1:
                print(f"Attempt {attempt + 1} failed ({error_msg[:50]}...), retrying in {retry_delay}s...")
                time.sleep(retry_delay)
            else:
                raise

    # This should never be reached due to the loop, but mypy needs this
    raise RuntimeError("Retry loop exhausted without returning")
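

# Illustrative usage of retry_with_backoff (a sketch only, not used by the
# fixtures below; `client.system.ping` stands in for any flaky call that
# raises RuntimeError on timeouts):
#
#     pong = retry_with_backoff(lambda: client.system.ping(),
#                               max_retries=3, retry_delay=5)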


def assert_valid_uuid(value, description="value"):
    """
    Assert that the given value is a string and a valid UUID.

    Args:
        value: The value to check
        description: Description of the value for error messages
    """
    assert isinstance(value, str), f"{description} should be a string, got {type(value)}"
    assert len(value) > 0, f"{description} should not be empty"

    try:
        uuid.UUID(value)
    except ValueError:
        pytest.fail(f"{description} should be a valid UUID, got: {value}")


def cleanup_project(hub, project_id):
    """
    Clean up a test project by stopping it and deleting it.

    Args:
        hub: Hub client instance
        project_id: Project ID to clean up
    """
    try:
        hub.projects.stop(project_id)
    except Exception as e:
        print(f"Warning: Failed to stop project {project_id}: {e}")

    try:
        hub.projects.delete(project_id)
    except Exception as e:
        print(f"Warning: Failed to delete project {project_id}: {e}")


@pytest.fixture(scope="session")
def api_key():
    """Get API key from environment variable."""
    key = os.environ.get("COCALC_API_KEY")
    if not key:
        pytest.fail("COCALC_API_KEY environment variable is required but not set")
    return key


@pytest.fixture(scope="session")
def cocalc_host():
    """Get CoCalc host from environment variable, defaulting to http://localhost:5000."""
    return os.environ.get("COCALC_HOST", "http://localhost:5000")


@pytest.fixture(scope="session")
def hub(api_key, cocalc_host):
    """Create Hub client instance."""
    return Hub(api_key=api_key, host=cocalc_host)


@pytest.fixture(scope="session")
def validate_api_key_config(hub):
    """
    Validate that the API key is properly configured for testing.

    For account-scoped keys, requires COCALC_PROJECT_ID to be set.
    For project-scoped keys, no additional configuration is needed.
    """
    scope = None
    hub_error = None

    # First, try the hub endpoint (works only for account-scoped keys)
    try:
        scope = hub.system.test()
    except Exception as e:
        hub_error = e

    # If the hub check failed, fall back to the project endpoint so project-scoped keys work
    if scope is None:
        try:
            project_client = Project(
                api_key=hub.api_key,
                host=hub.host,
                project_id=os.environ.get("COCALC_PROJECT_ID"),
            )
            scope = project_client.system.test()
        except Exception as project_error:
            pytest.fail(
                "Failed to determine API key scope using both hub and project endpoints:\n"
                f" hub error: {hub_error}\n"
                f" project error: {project_error}"
            )

    is_account_scoped = "account_id" in scope
    is_project_scoped = "project_id" in scope

    if is_account_scoped:
        # Account-scoped key requires COCALC_PROJECT_ID for project tests
        project_id = os.environ.get("COCALC_PROJECT_ID")
        if not project_id:
            pytest.fail("Account-scoped API key detected, but COCALC_PROJECT_ID is not set.\n\n"
                        "For testing with an account-scoped key, you must provide a project ID:\n"
                        " export COCALC_PROJECT_ID=<your-project-uuid>\n\n"
                        "Alternatively, use a project-scoped API key, which has the project ID embedded.")
    elif not is_project_scoped:
        pytest.fail(f"Could not determine API key scope. Response: {scope}\n"
                    "Expected either 'account_id' (account-scoped) or 'project_id' (project-scoped).")


@pytest.fixture(scope="session")
def temporary_project(hub, resource_tracker, request, validate_api_key_config):
    """
    Create a temporary project for testing and return project info.
    This fixture is session-scoped, so only ONE project is created for the entire test suite.
    """
    # Create a project with a timestamp to make it unique and identifiable
    timestamp = time.strftime("%Y%m%d-%H%M%S")
    title = f"CoCalc API Test {timestamp}"
    description = "Temporary project created by cocalc-api tests"

    # Use tracked creation
    project_id = create_tracked_project(hub, resource_tracker, title=title, description=description)

    # Start the project so it can respond to API calls
    try:
        hub.projects.start(project_id)

        # Wait for the project to be ready (can take 10-15 seconds)
        from cocalc_api import Project

        test_project = Project(project_id=project_id, api_key=hub.api_key, host=hub.host)
        for attempt in range(10):
            time.sleep(5)  # Wait 5 seconds before checking
            try:
                # Try to ping the project to see if it's ready
                test_project.system.ping()  # If this succeeds, the project is ready
                break
            except Exception:
                if attempt == 9:  # Last attempt
                    print(f"Warning: Project {project_id} did not become ready within 50 seconds")
                else:
                    print(f"Warning: Project {project_id} may not be ready yet")

        ensure_python3_kernel(test_project)

    except Exception as e:
        print(f"Warning: Failed to start project {project_id}: {e}")

    project_info = {'project_id': project_id, 'title': title, 'description': description}

    # Note: No finalizer needed - cleanup happens automatically via cleanup_all_test_resources

    return project_info


@pytest.fixture(scope="session")
def project_client(temporary_project, api_key, cocalc_host):
    """Create a Project client instance using the temporary project."""
    return Project(project_id=temporary_project['project_id'], api_key=api_key, host=cocalc_host)
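

# Illustrative example of a test that would consume these fixtures (a sketch
# only, not part of this conftest; it assumes system.ping() returns without
# raising when the project is running):
#
#     def test_project_is_reachable(project_client):
#         project_client.system.ping()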


@pytest.fixture(autouse=True)
def cleanup_kernels_after_test(request, project_client):
    """
    Clean up excess Jupyter kernels after tests in classes that use them.

    Kernel accumulation happens because the kernel pool reuses kernels, but under
    heavy test load, old kernels aren't always properly cleaned up by the pool.
    This fixture runs after every test, but it only removes kernels when more have
    accumulated than the pool keeps, to avoid interfering with the pool's reuse
    strategy.

    The fixture only acts for tests in classes that deal with Jupyter kernels
    (TestJupyterExecuteViaHub, TestJupyterExecuteViaProject, TestJupyterKernelManagement)
    to avoid interfering with other tests.
    """
    yield  # Allow the test to run

    # Only clean up for Jupyter-related tests
    test_class = request.cls
    if test_class is None:
        return

    jupyter_test_classes = {
        'TestJupyterExecuteViaHub',
        'TestJupyterExecuteViaProject',
        'TestJupyterKernelManagement',
    }

    if test_class.__name__ not in jupyter_test_classes:
        return

    # Clean up accumulated kernels carefully
    # Only clean up if we have more kernels than the pool can manage (> 3)
    # This gives some buffer to the pool's reuse mechanism
    try:
        import time
        kernels = project_client.system.list_jupyter_kernels()

        # Only clean up if significantly over the pool size (pool size is 2)
        # We use a threshold of 3 to trigger cleanup
        if len(kernels) > 3:
            # Keep the 2 most recent kernels (higher PIDs), stop older ones
            kernels_sorted = sorted(kernels, key=lambda k: k.get("pid", 0))
            kernels_to_stop = kernels_sorted[:-2]  # All but the 2 newest

            for kernel in kernels_to_stop:
                try:
                    project_client.system.stop_jupyter_kernel(pid=kernel["pid"])
                    time.sleep(0.1)  # Small delay between kills
                except Exception:
                    # Silently ignore individual kernel failures
                    pass
    except Exception:
        # If listing kernels fails, just continue
        pass


def ensure_python3_kernel(project_client: Project):
    """
    Ensure the default python3 Jupyter kernel is installed in the project.

    If it is not available, install ipykernel and register the kernelspec.
    """

    def try_exec(command: list[str], timeout: int = 60, capture_stdout: bool = False):
        try:
            result = project_client.system.exec(
                command=command[0],
                args=command[1:],
                timeout=timeout,
            )
            return (True, result["stdout"] if capture_stdout else None)
        except Exception as err:
            print(f"Warning: command {command} failed: {err}")
            return (False, None)

    def has_python_kernel() -> bool:
        ok, stdout = try_exec(
            ["python3", "-m", "jupyter", "kernelspec", "list", "--json"],
            capture_stdout=True,
        )
        if not ok or stdout is None:
            return False
        try:
            data = json.loads(stdout)
            return "python3" in data.get("kernelspecs", {})
        except Exception as err:
            print(f"Warning: Failed to parse kernelspec list: {err}")
            return False

    if has_python_kernel():
        return

    print("Installing python3 kernelspec in project...")
    # Install pip if needed
    try_exec(["python3", "-m", "ensurepip", "--user"], timeout=120)
    # Upgrade pip but ignore errors (not fatal)
    try_exec(["python3", "-m", "pip", "install", "--user", "--upgrade", "pip"], timeout=120)

    # try_exec returns an (ok, stdout) tuple; only the success flag matters here
    ok, _ = try_exec(["python3", "-m", "pip", "install", "--user", "ipykernel"], timeout=300)
    if not ok:
        raise RuntimeError("Failed to install ipykernel via pip")

    ok, _ = try_exec(
        [
            "python3",
            "-m",
            "ipykernel",
            "install",
            "--user",
            "--name=python3",
            "--display-name=Python 3",
        ],
        timeout=120,
    )
    if not ok:
        raise RuntimeError("Failed to install python3 kernelspec")

    if not has_python_kernel():
        raise RuntimeError("Failed to ensure python3 kernelspec is installed in project")


# ============================================================================
# Database Cleanup Infrastructure
# ============================================================================


@pytest.fixture(scope="session")
def resource_tracker():
    """
    Track all resources created during tests for cleanup.

    This fixture provides a dictionary of sets used to track all projects,
    accounts, and organizations created during test execution.
    At the end of the test session, all tracked resources are automatically
    hard-deleted from the database.

    Usage:
        def test_my_feature(hub, resource_tracker):
            # Create tracked resources using helper functions
            org_id = create_tracked_org(hub, resource_tracker, "test-org")
            user_id = create_tracked_user(hub, resource_tracker, "test-org", email="test@example.com")
            project_id = create_tracked_project(hub, resource_tracker, title="Test Project")

            # Test logic here...

            # No cleanup needed - happens automatically!

    Returns a dictionary with sets for tracking:
        - projects: set of project_id (UUID strings)
        - accounts: set of account_id (UUID strings)
        - organizations: set of organization names (strings)
    """
    tracker = {
        'projects': set(),
        'accounts': set(),
        'organizations': set(),
    }
    return tracker


@pytest.fixture(scope="session")
def check_cleanup_config():
    """
    Check cleanup configuration BEFORE any tests run.
    Fails fast if cleanup is enabled but database credentials are missing.
    """
    cleanup_enabled = os.environ.get("COCALC_TESTS_CLEANUP", "true").lower() != "false"

    if not cleanup_enabled:
        print("\n⚠ Database cleanup DISABLED via COCALC_TESTS_CLEANUP=false")
        print(" Test resources will remain in the database.")
        return  # Skip checks if cleanup is disabled

    # Cleanup is enabled - verify required configuration
    pghost = os.environ.get("PGHOST")

    # PGHOST is mandatory
    if not pghost:
        pytest.exit("\n" + "=" * 70 + "\n"
                    "ERROR: Database cleanup is enabled but PGHOST is not set!\n\n"
                    "To run tests, you must do one of the following:\n"
                    f" 1. Set PGHOST for a socket connection (no password needed):\n"
                    f" export PGHOST={PGHOST_SOCKET_EXAMPLE}\n\n"
                    f" 2. Set PGHOST for a network connection (requires PGPASSWORD):\n"
                    f" export PGHOST={PGHOST_NETWORK_EXAMPLE}\n"
                    " export PGPASSWORD=your_password\n\n"
                    " 3. Disable cleanup (not recommended):\n"
                    " export COCALC_TESTS_CLEANUP=false\n"
                    "=" * 70,
                    returncode=1)


@pytest.fixture(scope="session")
def db_pool(check_cleanup_config):
    """
    Create a PostgreSQL connection pool for direct database cleanup.

    Supports both Unix socket and network connections:

    Socket connection (local dev):
        export PGUSER=smc
        export PGHOST=/path/to/cocalc-data/socket
        # No password needed for socket auth

    Network connection:
        export PGUSER=smc
        export PGHOST=localhost
        export PGPORT=5432
        export PGPASSWORD=your_password

    To disable cleanup:
        export COCALC_TESTS_CLEANUP=false
    """
    # Check if cleanup is disabled
    cleanup_enabled = os.environ.get("COCALC_TESTS_CLEANUP", "true").lower() != "false"

    if not cleanup_enabled:
        print("\n⚠ Database cleanup DISABLED via COCALC_TESTS_CLEANUP=false")
        print(" Test resources will remain in the database.")
        yield None
        return

    # Get connection parameters with defaults
    pguser = os.environ.get("PGUSER", "smc")
    pghost = os.environ.get("PGHOST")
    pgport = os.environ.get("PGPORT", "5432")
    pgdatabase = os.environ.get("PGDATABASE", "smc")
    pgpassword = os.environ.get("PGPASSWORD")

    # PGHOST is mandatory (already checked in check_cleanup_config, but double-check)
    if not pghost:
        pytest.fail("\n" + "=" * 70 + "\n"
                    "ERROR: PGHOST environment variable is required for database cleanup!\n"
                    "=" * 70)

    # Determine if using socket or network connection
    is_socket = pghost.startswith("/")

    # Build connection kwargs
    conn_kwargs = {
        "host": pghost,
        "database": pgdatabase,
        "user": pguser,
    }

    # Only add port for network connections
    if not is_socket:
        conn_kwargs["port"] = pgport

    # Only add password if provided
    if pgpassword:
        conn_kwargs["password"] = pgpassword

    try:
        connection_pool = pg_pool.SimpleConnectionPool(1, 5, **conn_kwargs)

        if is_socket:
            print(f"\n✓ Database cleanup enabled (socket): {pguser}@{pghost}/{pgdatabase}")
        else:
            print(f"\n✓ Database cleanup enabled (network): {pguser}@{pghost}:{pgport}/{pgdatabase}")

        yield connection_pool

        connection_pool.closeall()

    except Exception as e:
        conn_type = "socket" if is_socket else "network"
        pytest.fail("\n" + "=" * 70 + "\n"
                    f"ERROR: Failed to connect to database ({conn_type}) for cleanup:\n{e}\n\n"
                    f"Connection details:\n"
                    f" Host: {pghost}\n"
                    f" Database: {pgdatabase}\n"
                    f" User: {pguser}\n" + (f" Port: {pgport}\n" if not is_socket else "") +
                    "\nTo disable cleanup: export COCALC_TESTS_CLEANUP=false\n"
                    "=" * 70)


def create_tracked_project(hub, resource_tracker, **kwargs):
    """Create a project and register it for cleanup."""
    project_id = hub.projects.create_project(**kwargs)
    resource_tracker['projects'].add(project_id)
    return project_id


def create_tracked_user(hub, resource_tracker, org_name, **kwargs):
    """Create a user and register it for cleanup."""
    user_id = hub.org.create_user(name=org_name, **kwargs)
    resource_tracker['accounts'].add(user_id)
    return user_id


def create_tracked_org(hub, resource_tracker, org_name):
    """Create an organization and register it for cleanup."""
    org_id = hub.org.create(org_name)
    resource_tracker['organizations'].add(org_name)  # Track by name
    return org_id


def hard_delete_projects(db_pool, project_ids):
    """Hard delete projects from database using direct SQL."""
    if not project_ids:
        return

    conn = db_pool.getconn()
    try:
        cursor = conn.cursor()
        for project_id in project_ids:
            try:
                cursor.execute("DELETE FROM projects WHERE project_id = %s", (project_id, ))
                conn.commit()
                print(f" ✓ Deleted project {project_id}")
            except Exception as e:
                conn.rollback()
                print(f" ✗ Failed to delete project {project_id}: {e}")
        cursor.close()
    finally:
        db_pool.putconn(conn)


def hard_delete_accounts(db_pool, account_ids):
    """
    Hard delete accounts from database using direct SQL.

    This also finds and deletes ALL projects where the account is the owner,
    including auto-created projects like "My First Project".
    """
    if not account_ids:
        return

    conn = db_pool.getconn()
    try:
        cursor = conn.cursor()
        for account_id in account_ids:
            try:
                # First, find ALL projects where this account is the owner
                # The users JSONB field has structure: {"account_id": {"group": "owner", ...}}
                cursor.execute(
                    """
                    SELECT project_id FROM projects
                    WHERE users ? %s
                    AND users->%s->>'group' = 'owner'
                    """, (account_id, account_id))
                owned_projects = cursor.fetchall()

                # Delete all owned projects (including auto-created ones)
                for (project_id, ) in owned_projects:
                    cursor.execute("DELETE FROM projects WHERE project_id = %s", (project_id, ))
                    print(f" ✓ Deleted owned project {project_id} for account {account_id}")

                # Remove from organizations (admin_account_ids array and users JSONB)
                cursor.execute(
                    "UPDATE organizations SET admin_account_ids = array_remove(admin_account_ids, %s), users = users - %s WHERE users ? %s",
                    (account_id, account_id, account_id))

                # Remove from remaining project collaborators (users JSONB field)
                cursor.execute("UPDATE projects SET users = users - %s WHERE users ? %s", (account_id, account_id))

                # Delete the account
                cursor.execute("DELETE FROM accounts WHERE account_id = %s", (account_id, ))
                conn.commit()
                print(f" ✓ Deleted account {account_id}")
            except Exception as e:
                conn.rollback()
                print(f" ✗ Failed to delete account {account_id}: {e}")
        cursor.close()
    finally:
        db_pool.putconn(conn)


def hard_delete_organizations(db_pool, org_names):
    """Hard delete organizations from database using direct SQL."""
    if not org_names:
        return

    conn = db_pool.getconn()
    try:
        cursor = conn.cursor()
        for org_name in org_names:
            try:
                cursor.execute("DELETE FROM organizations WHERE name = %s", (org_name, ))
                conn.commit()
                print(f" ✓ Deleted organization {org_name}")
            except Exception as e:
                conn.rollback()
                print(f" ✗ Failed to delete organization {org_name}: {e}")
        cursor.close()
    finally:
        db_pool.putconn(conn)


@pytest.fixture(scope="session", autouse=True)
def cleanup_all_test_resources(hub, resource_tracker, db_pool, request):
    """
    Automatically clean up all tracked resources at the end of the test session.

    Cleanup is enabled by default. To disable:
        export COCALC_TESTS_CLEANUP=false
    """

    def cleanup():
        # Skip cleanup if db_pool is None (cleanup disabled)
        if db_pool is None:
            print("\n⚠ Skipping database cleanup (COCALC_TESTS_CLEANUP=false)")
            return

        print("\n" + "=" * 70)
        print("CLEANING UP TEST RESOURCES FROM DATABASE")
        print("=" * 70)

        total_projects = len(resource_tracker['projects'])
        total_accounts = len(resource_tracker['accounts'])
        total_orgs = len(resource_tracker['organizations'])

        print("\nResources to clean up:")
        print(f" - Projects: {total_projects}")
        print(f" - Accounts: {total_accounts}")
        print(f" - Organizations: {total_orgs}")

        # First, stop and soft-delete projects gracefully via the API
        if total_projects > 0:
            print(f"\nStopping {total_projects} projects...")
            for project_id in resource_tracker['projects']:
                try:
                    cleanup_project(hub, project_id)
                except Exception as e:
                    print(f" Warning: Failed to stop project {project_id}: {e}")

        # Then hard-delete from the database, in order:
        # 1. Projects (no dependencies)
        if total_projects > 0:
            print(f"\nHard-deleting {total_projects} projects from database...")
            hard_delete_projects(db_pool, resource_tracker['projects'])

        # 2. Accounts (must remove from organizations/projects first)
        if total_accounts > 0:
            print(f"\nHard-deleting {total_accounts} accounts from database...")
            hard_delete_accounts(db_pool, resource_tracker['accounts'])

        # 3. Organizations (no dependencies after accounts removed)
        if total_orgs > 0:
            print(f"\nHard-deleting {total_orgs} organizations from database...")
            hard_delete_organizations(db_pool, resource_tracker['organizations'])

        print("\n✓ Test resource cleanup complete!")
        print("=" * 70)

    request.addfinalizer(cleanup)

    yield


@pytest.fixture(scope="session", autouse=True)
def cleanup_jupyter_kernels_session(project_client):
    """
    Clean up all Jupyter kernels created during the test session.

    This session-scoped fixture ensures that all kernels spawned during testing
    are properly terminated at the end of the test session. This prevents
    orphaned processes from accumulating in the system.

    The fixture runs AFTER all tests complete (via yield), ensuring no
    interference with test execution while still guaranteeing cleanup.
    """
    yield  # Allow all tests to run first

    # After all tests complete, clean up all remaining kernels
    try:
        kernels = project_client.system.list_jupyter_kernels()
        if kernels:
            print(f"\n{'='*70}")
            print(f"CLEANING UP {len(kernels)} JUPYTER KERNELS FROM TEST SESSION")
            print(f"{'='*70}")
            for kernel in kernels:
                try:
                    pid = kernel.get("pid")
                    result = project_client.system.stop_jupyter_kernel(pid=pid)
                    if result.get("success"):
                        print(f"✓ Stopped kernel PID {pid}")
                    else:
                        print(f"✗ Failed to stop kernel PID {pid}")
                except Exception as e:
                    print(f"✗ Error stopping kernel: {e}")
            print(f"{'='*70}\n")
    except Exception as e:
        print(f"Warning: Failed to clean up jupyter kernels: {e}")