import os
import json
import logging
import asyncio
import time
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Optional

import aiohttp
import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import pygame.mixer


class AsyncCircleCIClient:
    def __init__(self, token: str, project_slug: str):
        self.token = token
        self.project_slug = project_slug
        self.base_url = "https://circleci.com/api/v2"
        self.headers = {
            "Circle-Token": token,
            "Accept": "application/json"
        }
        self.logger = logging.getLogger("CircleCI")

    async def get_json(self, session: aiohttp.ClientSession, url: str, params: Dict = None) -> Dict:
        async with session.get(url, params=params) as response:
            response.raise_for_status()
            return await response.json()

    async def get_recent_pipelines(
        self,
        session: aiohttp.ClientSession,
        org_slug: str = None,
        page_token: str = None,
        limit: int = None,
        branch: str = None
    ):
        """Get recent pipelines for a project, following pagination as needed."""
        params = {
            "branch": branch,
            "page-token": page_token
        }
        # Remove None values
        params = {k: v for k, v in params.items() if v is not None}

        url = f"{self.base_url}/project/{self.project_slug}/pipeline"
        data = await self.get_json(session, url, params)
        pipelines = data["items"]
        next_page_token = data.get("next_page_token")

        # If we already have enough pipelines, truncate to the limit and stop
        if limit and len(pipelines) >= limit:
            return pipelines[:limit]

        # If there are more pages and we haven't hit the limit, recursively fetch them
        if next_page_token:
            next_pipelines = await self.get_recent_pipelines(
                session,
                org_slug,
                page_token=next_page_token,
                limit=limit - len(pipelines) if limit else None,  # Adjust limit for the next page
                branch=branch
            )
            pipelines.extend(next_pipelines)

        return pipelines

    async def get_workflow_jobs(self, session: aiohttp.ClientSession, pipeline_id: str) -> List[Dict]:
        self.logger.debug(f"Fetching workflows for pipeline {pipeline_id}")
        url = f"{self.base_url}/pipeline/{pipeline_id}/workflow"
        workflows_data = await self.get_json(session, url)
        workflows = workflows_data["items"]

        # Fetch the jobs for all workflows in parallel
        jobs_tasks = []
        for workflow in workflows:
            url = f"{self.base_url}/workflow/{workflow['id']}/job"
            jobs_tasks.append(self.get_json(session, url))

        jobs_responses = await asyncio.gather(*jobs_tasks, return_exceptions=True)

        all_jobs = []
        for jobs_data in jobs_responses:
            if isinstance(jobs_data, Exception):
                continue
            all_jobs.extend(jobs_data["items"])

        return all_jobs

    async def get_artifacts(self, session: aiohttp.ClientSession, job_number: str) -> List[Dict]:
        url = f"{self.base_url}/project/{self.project_slug}/{job_number}/artifacts"
        data = await self.get_json(session, url)
        return data["items"]
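

# Minimal usage sketch for AsyncCircleCIClient, assuming a valid API token;
# the slug below is a placeholder in CircleCI's "vcs/org/repo" format:
#
#   async def fetch_latest_jobs():
#       client = AsyncCircleCIClient("<token>", "gh/example-org/example-repo")
#       async with aiohttp.ClientSession(headers=client.headers) as session:
#           pipelines = await client.get_recent_pipelines(session, limit=5, branch="main")
#           for pipeline in pipelines:
#               jobs = await client.get_workflow_jobs(session, pipeline["id"])
#               print(pipeline["id"], [j["name"] for j in jobs])
#
#   asyncio.run(fetch_latest_jobs())
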
class PackageSizeTracker:
    def __init__(self, token: str, project_slug: str, debug: bool = False):
        self.setup_logging(debug)
        self.client = AsyncCircleCIClient(token, project_slug)
        self.logger = logging.getLogger("PackageSizeTracker")
        self.last_data_hash = None
        self.debug = debug

        # Initialize the pygame mixer
        pygame.mixer.init()

        # Sound file paths - pygame can play MP3 files directly
        sounds_dir = Path(__file__).parent / "sounds"
        self.sounds = {
            'lines_up': sounds_dir / "gta5_wasted.mp3",
            'lines_down': sounds_dir / "pokemon_evolve.mp3",
            'tokens_up': sounds_dir / "pokemon_evolve.mp3",
            'tokens_down': sounds_dir / "gta5_wasted.mp3",
            'size_up': sounds_dir / "gta5_wasted.mp3",
            'size_down': sounds_dir / "pokemon_evolve.mp3"
        }

    def test_sound_effects(self):
        """Test all sound effects with a small delay between each."""
        self.logger.info("Testing sound effects...")
        for sound_key in self.sounds:
            self.logger.info(f"Playing {sound_key}")
            self._play_sound(sound_key)
            time.sleep(1)  # Wait 1 second between sounds

    def setup_logging(self, debug: bool):
        level = logging.DEBUG if debug else logging.INFO
        logging.basicConfig(
            level=level,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%H:%M:%S'
        )

    def extract_commit_info(self, pipeline: Dict) -> Optional[Dict]:
        try:
            # Prefer the github_app trigger parameters when present
            if 'trigger_parameters' in pipeline and 'github_app' in pipeline['trigger_parameters']:
                github_app = pipeline['trigger_parameters']['github_app']
                return {
                    'commit_hash': github_app.get('checkout_sha'),
                    'web_url': f"{github_app.get('repo_url')}/commit/{github_app.get('checkout_sha')}",
                    'branch': github_app.get('branch', 'unknown'),
                    'author': {
                        'name': github_app.get('commit_author_name'),
                        'email': github_app.get('commit_author_email'),
                        'username': github_app.get('user_username')
                    },
                    'message': github_app.get('commit_message')
                }

            # Fall back to the git trigger parameters
            if 'trigger_parameters' in pipeline and 'git' in pipeline['trigger_parameters']:
                git = pipeline['trigger_parameters']['git']
                return {
                    'commit_hash': git.get('checkout_sha'),
                    'web_url': f"{git.get('repo_url')}/commit/{git.get('checkout_sha')}",
                    'branch': git.get('branch', 'unknown'),
                    'author': {
                        'name': git.get('commit_author_name'),
                        'email': git.get('commit_author_email'),
                        'username': git.get('author_login')
                    },
                    'message': git.get('commit_message')
                }

            self.logger.warning(f"Could not find commit info in pipeline {pipeline['id']}")
            return None
        except Exception as e:
            self.logger.error(f"Error extracting commit info: {str(e)}")
            return None

    async def process_pipeline(self, session: aiohttp.ClientSession, pipeline: Dict) -> Optional[Dict]:
        try:
            commit_info = self.extract_commit_info(pipeline)
            if not commit_info:
                return None

            data_point = {
                "commit_hash": commit_info['commit_hash'],
                "commit_url": commit_info['web_url'],
                "timestamp": pipeline.get("created_at", pipeline.get("updated_at")),
                "pipeline_status": pipeline.get("state", "unknown"),
                "branch": commit_info['branch'],
                "author": commit_info['author'],
                "commit_message": commit_info['message']
            }

            jobs = await self.client.get_workflow_jobs(session, pipeline["id"])

            # Find the successful package size job, if any
            size_job = next(
                (j for j in jobs if j["name"] == "measure_pip_sizes" and j["status"] == "success"),
                None
            )

            # Find the successful line count job, if any
            linecount_job = next(
                (j for j in jobs if j["name"] == "check_line_count" and j["status"] == "success"),
                None
            )

            # Find the successful benchmark (runner) job, if any
            benchmark_job = next(
                (j for j in jobs if j["name"] == "runner" and j["status"] == "success"),
                None
            )

            # Return None if no relevant jobs were found
            if not size_job and not linecount_job and not benchmark_job:
                self.logger.debug(f"No relevant jobs found for pipeline {pipeline['id']}")
                return None

            # Process benchmark data if available
            if benchmark_job:
                benchmark_artifacts = await self.client.get_artifacts(session, benchmark_job["job_number"])
                benchmark_report = next(
                    (a for a in benchmark_artifacts if a["path"].endswith("benchmark.json")),
                    None
                )
                if benchmark_report:
                    benchmark_data = await self.client.get_json(session, benchmark_report["url"])
                    data_point.update({
                        "tokens_per_second": benchmark_data["tokens_per_second"],
                        "time_to_first_token": benchmark_data.get("time_to_first_token", 0)
                    })
                    self.logger.info(
                        f"Processed benchmark data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"tokens/s {benchmark_data['tokens_per_second']:.2f}"
                    )

            # Process size data if available
            if size_job:
                size_artifacts = await self.client.get_artifacts(session, size_job["job_number"])
                size_report = next(
                    (a for a in size_artifacts if a["path"].endswith("pip-sizes.json")),
                    None
                )
                if size_report:
                    size_data = await self.client.get_json(session, size_report["url"])
                    data_point.update({
                        "total_size_mb": size_data["total_size_mb"],
                        "packages": size_data["packages"]
                    })
                    self.logger.info(
                        f"Processed size data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"size {size_data['total_size_mb']:.2f}MB"
                    )

            # Process line count data if available
            if linecount_job:
                linecount_artifacts = await self.client.get_artifacts(session, linecount_job["job_number"])
                linecount_report = next(
                    (a for a in linecount_artifacts if a["path"].endswith("line-count-snapshot.json")),
                    None
                )
                if linecount_report:
                    linecount_data = await self.client.get_json(session, linecount_report["url"])
                    data_point.update({
                        "total_lines": linecount_data["total_lines"],
                        "total_files": linecount_data["total_files"],
                        "files": linecount_data["files"]
                    })
                    self.logger.info(
                        f"Processed line count data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"lines {linecount_data['total_lines']:,}"
                    )

            return data_point
        except Exception as e:
            self.logger.error(f"Error processing pipeline {pipeline['id']}: {str(e)}")
            return None

    async def process_pipeline_batch(
        self,
        session: aiohttp.ClientSession,
        pipelines: List[Dict],
        batch_size: int = 5
    ) -> List[Dict]:
        """
        Process a batch of pipelines with rate limiting.

        Args:
            session: aiohttp client session
            pipelines: List of pipelines to process
            batch_size: Number of pipelines to process in parallel

        Returns:
            List of processed pipeline data points
        """
        data_points = []

        for i in range(0, len(pipelines), batch_size):
            batch = pipelines[i:i + batch_size]

            # Process the batch in parallel
            tasks = [self.process_pipeline(session, pipeline) for pipeline in batch]
            batch_results = await asyncio.gather(*tasks)

            # Filter out None results
            batch_data = [r for r in batch_results if r is not None]
            data_points.extend(batch_data)

            # Pause between batches if there are more to process
            if i + batch_size < len(pipelines):
                await asyncio.sleep(1)  # 1 second delay between batches

        return data_points
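
    # Illustration of the batching behavior above: with the default batch_size
    # of 5, a 20-pipeline fetch is processed in four parallel waves separated
    # by 1 s pauses, which keeps request bursts against the CircleCI API short.
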
    async def collect_data(self) -> List[Dict]:
        self.logger.info("Starting data collection...")

        async with aiohttp.ClientSession(headers=self.client.headers) as session:
            # Get pipelines from the main branch
            main_pipelines = await self.client.get_recent_pipelines(
                session,
                org_slug=self.client.project_slug,
                limit=20,
                branch="main"
            )

            # Pause between branch requests
            await asyncio.sleep(2)

            # Get pipelines from the circleci branch
            circleci_pipelines = await self.client.get_recent_pipelines(
                session,
                org_slug=self.client.project_slug,
                limit=20,
                branch="circleci"
            )

            # Combine the pipelines and sort by creation date, most recent first
            pipelines = main_pipelines + circleci_pipelines
            pipelines.sort(
                key=lambda x: datetime.fromisoformat(
                    x.get("created_at", x.get("updated_at")).replace('Z', '+00:00')
                ),
                reverse=True
            )

            self.logger.info(f"Found {len(pipelines)} recent pipelines")

            # Process the pipelines in batches
            data_points = await self.process_pipeline_batch(session, pipelines)

            # Sort data points by timestamp, most recent first
            data_points.sort(
                key=lambda x: datetime.fromisoformat(
                    x.get("timestamp").replace('Z', '+00:00')
                ),
                reverse=True
            )

            return data_points

    def generate_report(self, data: List[Dict], output_dir: str = "reports") -> Optional[str]:
        self.logger.info("Generating report...")
        if not data:
            self.logger.error("No data to generate report from!")
            return None

        # Determine the latest main-branch pipeline status from recorded errors
        latest_main_pipeline = next((d for d in data if d.get('branch') == 'main'), None)
        latest_pipeline_status = 'success' if latest_main_pipeline and not latest_main_pipeline.get('errors') else 'failure'

        # Log the pipeline status
        if latest_main_pipeline:
            self.logger.info(
                f"Latest main branch pipeline status: {latest_pipeline_status} "
                f"(commit: {latest_main_pipeline['commit_hash'][:7]})"
            )
        else:
            self.logger.warning("No pipeline data found for main branch")

        # Convert output_dir to a Path and create it if it doesn't exist
        output_dir = Path(output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)

        # Create separate dataframes for each metric
        df_size = pd.DataFrame([d for d in data if 'total_size_mb' in d])
        df_lines = pd.DataFrame([d for d in data if 'total_lines' in d])
        df_benchmark = pd.DataFrame([d for d in data if 'tokens_per_second' in d])

        # Create a single figure with subplots
        fig = make_subplots(
            rows=3, cols=2,
            subplot_titles=('', 'Package Size', '', 'Line Count', '', 'Tokens per Second'),
            vertical_spacing=0.2,
            column_widths=[0.2, 0.8],
            specs=[[{"type": "indicator"}, {"type": "scatter"}],
                   [None, {"type": "scatter"}],
                   [None, {"type": "scatter"}]]
        )

        # Add the package size trace if we have data
        if not df_size.empty:
            df_size['timestamp'] = pd.to_datetime(df_size['timestamp'])
            df_size = df_size.sort_values('timestamp')
            fig.add_trace(
                go.Scatter(
                    x=df_size['timestamp'],
                    y=df_size['total_size_mb'],
                    mode='lines+markers',
                    name='Package Size',
                    customdata=df_size[['commit_hash', 'commit_url']].values,
                    hovertemplate="<br>".join([
                        "Size: %{y:.2f}MB",
                        "Date: %{x}",
                        "Commit: %{customdata[0]}",
                        "<extra></extra>"
                    ])
                ),
                row=1, col=2
            )
            fig.update_yaxes(title_text="Size (MB)", row=1, col=2)

        # Add the line count trace if we have data
        if not df_lines.empty:
            df_lines['timestamp'] = pd.to_datetime(df_lines['timestamp'])
            df_lines = df_lines.sort_values('timestamp')
            fig.add_trace(
                go.Scatter(
                    x=df_lines['timestamp'],
                    y=df_lines['total_lines'],
                    mode='lines+markers',
                    name='Line Count',
                    customdata=df_lines[['commit_hash', 'commit_url']].values,
                    hovertemplate="<br>".join([
                        "Lines: %{y:,.0f}",
                        "Date: %{x}",
                        "Commit: %{customdata[0]}",
                        "<extra></extra>"
                    ])
                ),
                row=2, col=2
            )
            fig.update_yaxes(title_text="Total Lines", row=2, col=2)

        # Add the tokens per second trace if we have data
        if not df_benchmark.empty:
            df_benchmark['timestamp'] = pd.to_datetime(df_benchmark['timestamp'])
            df_benchmark = df_benchmark.sort_values('timestamp')
            fig.add_trace(
                go.Scatter(
                    x=df_benchmark['timestamp'],
                    y=df_benchmark['tokens_per_second'],
                    mode='lines+markers',
                    name='Tokens/Second',
                    customdata=df_benchmark[['commit_hash', 'commit_url']].values,
                    hovertemplate="<br>".join([
                        "Tokens/s: %{y:.2f}",
                        "Date: %{x}",
                        "Commit: %{customdata[0]}",
                        "<extra></extra>"
                    ])
                ),
                row=3, col=2
            )
            fig.update_yaxes(title_text="Tokens per Second", row=3, col=2)

        # Update the layout
        fig.update_layout(
            height=800,
            showlegend=False,
            title_text="Package Metrics Dashboard",
            title_x=0.5,
            plot_bgcolor='white',
            paper_bgcolor='white',
            font=dict(size=12),
            hovermode='x unified'
        )

        # Build the dashboard HTML with a date range picker
        dashboard_html = f"""
        <html>
        <head>
            <title>Package Metrics Dashboard</title>
            <link rel="stylesheet" type="text/css" href="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.css" />
            <style>
                body {{
                    background-color: #f5f6fa;
                    margin: 0;
                    padding: 20px;
                    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
                }}
                .date-picker-container {{
                    background: white;
                    padding: 15px;
                    border-radius: 12px;
                    box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                    margin: 20px auto;
                    width: fit-content;
                }}
                #daterange {{
                    padding: 8px 12px;
                    border: 1px solid #ddd;
                    border-radius: 8px;
                    font-size: 14px;
                    width: 300px;
                    cursor: pointer;
                }}
                .quick-ranges {{
                    margin-top: 10px;
                    display: flex;
                    gap: 8px;
                    justify-content: center;
                }}
                .quick-ranges button {{
                    padding: 8px 16px;
                    border: 1px solid #e1e4e8;
                    border-radius: 8px;
                    background: white;
                    cursor: pointer;
                    font-size: 13px;
                    transition: all 0.2s ease;
                }}
                .quick-ranges button:hover {{
                    background: #f0f0f0;
                    transform: translateY(-1px);
                }}
                .dashboard-grid {{
                    display: grid;
                    grid-template-columns: 300px 1fr;
                    gap: 20px;
                    margin-top: 20px;
                }}
                .chart-container {{
                    background: white;
                    border-radius: 12px;
                    box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                    padding: 20px;
                    height: 350px;
                }}
                .chart-row {{
                    display: grid;
                    grid-template-columns: repeat(2, 1fr);
                    gap: 20px;
                }}
                .chart-row-full {{
                    grid-column: 2 / -1;
                }}
                .chart-box {{
                    background: white;
                    border-radius: 12px;
                    box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                    padding: 20px;
                    display: flex;
                    flex-direction: column;
                }}
                .chart-title {{
                    font-size: 16px;
                    font-weight: 600;
                    color: #2c3e50;
                    margin-bottom: 15px;
                    padding-bottom: 10px;
                    border-bottom: 1px solid #eee;
                }}
                .status-container {{
                    background: white;
                    border-radius: 12px;
                    box-shadow: 0 2px 4px rgba(0,0,0,0.1);
                    padding: 20px;
                    height: 350px;
                    display: flex;
                    flex-direction: column;
                    align-items: center;
                    justify-content: center;
                }}
                .traffic-light {{
                    width: 150px;
                    height: 150px;
                    border-radius: 50%;
                    margin: 20px;
                    box-shadow: 0 0 20px rgba(0,0,0,0.2);
                    position: relative;
                }}
                .traffic-light.success {{
                    background: #2ecc71; /* Bright green */
                    border: 8px solid #27ae60; /* Darker green border */
                }}
                .traffic-light.failure {{
                    background: #e74c3c; /* Bright red */
                    border: 8px solid #c0392b; /* Darker red border */
                }}
                .status-text {{
                    font-size: 24px;
                    font-weight: bold;
                    margin-top: 20px;
                    color: #2c3e50;
                }}
                /* Override Plotly's default margins */
                .js-plotly-plot .plotly {{
                    margin: 0 !important;
                }}
            </style>
        </head>
        <body>
            <div class="date-picker-container">
                <input type="text" id="daterange" />
                <div class="quick-ranges">
                    <button onclick="setQuickRange('1h')">Last Hour</button>
                    <button onclick="setQuickRange('6h')">Last 6 Hours</button>
                    <button onclick="setQuickRange('1d')">Last 24 Hours</button>
                    <button onclick="setQuickRange('7d')">Last 7 Days</button>
                    <button onclick="setQuickRange('30d')">Last 30 Days</button>
                    <button onclick="setQuickRange('all')">All Time</button>
                </div>
            </div>
            <div class="dashboard-grid">
                <div class="status-container">
                    <div class="chart-title">Pipeline Status</div>
                    <div class="traffic-light {'success' if latest_pipeline_status == 'success' else 'failure'}"></div>
                    <div class="status-text">
                        {'✓ Pipeline Passing' if latest_pipeline_status == 'success' else '✗ Pipeline Failing'}
                    </div>
                </div>
                <div class="chart-row">
                    <div class="chart-box">
                        <div class="chart-title">Package Size</div>
                        <div id="size-chart"></div>
                    </div>
                    <div class="chart-box">
                        <div class="chart-title">Line Count</div>
                        <div id="lines-chart"></div>
                    </div>
                </div>
                <div class="chart-row chart-row-full">
                    <div class="chart-box">
                        <div class="chart-title">Tokens per Second</div>
                        <div id="tokens-chart"></div>
                    </div>
                </div>
            </div>
            <script type="text/javascript" src="https://cdn.jsdelivr.net/jquery/latest/jquery.min.js"></script>
            <script type="text/javascript" src="https://cdn.jsdelivr.net/momentjs/latest/moment.min.js"></script>
            <script type="text/javascript" src="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.min.js"></script>
            <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
            <script>
                let globalMinDate = null;
                let globalMaxDate = null;

                // Embed the Plotly figure and split it into separate charts
                const originalData = {fig.to_json()};

                function initializeCharts() {{
                    // Create the size trend chart
                    const sizeTrace = originalData.data.find(trace => trace.name === 'Package Size');
                    if (sizeTrace) {{
                        Plotly.newPlot('size-chart',
                            [sizeTrace],
                            {{
                                showlegend: false,
                                height: 280,
                                margin: {{ t: 10, b: 40, l: 50, r: 20 }},
                                yaxis: {{ title: 'Size (MB)' }},
                                xaxis: {{
                                    type: 'date',
                                    title: null,
                                    range: [sizeTrace.x[0], sizeTrace.x[sizeTrace.x.length - 1]]
                                }}
                            }}
                        );
                    }}

                    // Create the line count chart
                    const lineTrace = originalData.data.find(trace => trace.name === 'Line Count');
                    if (lineTrace) {{
                        Plotly.newPlot('lines-chart',
                            [lineTrace],
                            {{
                                showlegend: false,
                                height: 280,
                                margin: {{ t: 10, b: 40, l: 50, r: 20 }},
                                yaxis: {{ title: 'Total Lines' }},
                                xaxis: {{
                                    type: 'date',
                                    title: null,
                                    range: [lineTrace.x[0], lineTrace.x[lineTrace.x.length - 1]]
                                }}
                            }}
                        );
                    }}

                    // Create the tokens per second chart
                    const tokensTrace = originalData.data.find(trace => trace.name === 'Tokens/Second');
                    if (tokensTrace) {{
                        Plotly.newPlot('tokens-chart',
                            [tokensTrace],
                            {{
                                showlegend: false,
                                height: 280,
                                margin: {{ t: 10, b: 40, l: 50, r: 20 }},
                                yaxis: {{ title: 'Tokens/Second' }},
                                xaxis: {{
                                    type: 'date',
                                    title: null,
                                    range: [tokensTrace.x[0], tokensTrace.x[tokensTrace.x.length - 1]]
                                }}
                            }}
                        );
                    }}

                    // Debug logs to check axis names
                    console.log('Size Chart Layout:', document.getElementById('size-chart').layout);
                    console.log('Lines Chart Layout:', document.getElementById('lines-chart').layout);
                    console.log('Tokens Chart Layout:', document.getElementById('tokens-chart').layout);
                }}

                function setQuickRange(range) {{
                    let start, end = moment();

                    switch(range) {{
                        case '1h':
                            start = moment().subtract(1, 'hours');
                            break;
                        case '6h':
                            start = moment().subtract(6, 'hours');
                            break;
                        case '1d':
                            start = moment().subtract(1, 'days');
                            break;
                        case '7d':
                            start = moment().subtract(7, 'days');
                            break;
                        case '30d':
                            start = moment().subtract(30, 'days');
                            break;
                        case 'all':
                            start = moment(globalMinDate);
                            end = moment(globalMaxDate);
                            break;
                    }}

                    $('#daterange').data('daterangepicker').setStartDate(start);
                    $('#daterange').data('daterangepicker').setEndDate(end);
                    updatePlotRange(start.toISOString(), end.toISOString());
                }}

                function updatePlotRange(startDate, endDate) {{
                    console.log('Updating range:', startDate, endDate);

                    // Get the actual x-axis names from the chart layouts
                    const sizeChartLayout = document.getElementById('size-chart').layout;
                    const sizeXAxisName = Object.keys(sizeChartLayout).find(key => key.startsWith('xaxis'));

                    const linesChartLayout = document.getElementById('lines-chart').layout;
                    const linesXAxisName = Object.keys(linesChartLayout).find(key => key.startsWith('xaxis'));

                    const tokensChartLayout = document.getElementById('tokens-chart').layout;
                    const tokensXAxisName = Object.keys(tokensChartLayout).find(key => key.startsWith('xaxis'));

                    // Build the relayout updates keyed by each chart's x-axis name
                    const sizeUpdateLayout = {{}};
                    sizeUpdateLayout[`${{sizeXAxisName}}.range`] = [startDate, endDate];

                    const linesUpdateLayout = {{}};
                    linesUpdateLayout[`${{linesXAxisName}}.range`] = [startDate, endDate];

                    const tokensUpdateLayout = {{}};
                    tokensUpdateLayout[`${{tokensXAxisName}}.range`] = [startDate, endDate];

                    // Update all three charts
                    Plotly.relayout('size-chart', sizeUpdateLayout)
                        .catch(err => console.error('Error updating size chart:', err));
                    Plotly.relayout('lines-chart', linesUpdateLayout)
                        .catch(err => console.error('Error updating lines chart:', err));
                    Plotly.relayout('tokens-chart', tokensUpdateLayout)
                        .catch(err => console.error('Error updating tokens chart:', err));
                }}

                function findDateRange(data) {{
                    let minDate = null;
                    let maxDate = null;

                    data.forEach(trace => {{
                        if (trace.x && trace.x.length > 0) {{
                            const dates = trace.x.map(d => new Date(d));
                            const traceMin = new Date(Math.min(...dates));
                            const traceMax = new Date(Math.max(...dates));
                            if (!minDate || traceMin < minDate) minDate = traceMin;
                            if (!maxDate || traceMax > maxDate) maxDate = traceMax;
                        }}
                    }});

                    return {{ minDate, maxDate }};
                }}

                // Initialize everything when the document is ready
                $(document).ready(function() {{
                    // Initialize charts
                    initializeCharts();

                    // Find the date range covered by the data
                    const {{ minDate, maxDate }} = findDateRange(originalData.data);
                    globalMinDate = minDate;
                    globalMaxDate = maxDate;

                    // Initialize the date range picker
                    $('#daterange').daterangepicker({{
                        startDate: minDate,
                        endDate: maxDate,
                        minDate: minDate,
                        maxDate: maxDate,
                        timePicker: true,
                        timePicker24Hour: true,
                        timePickerIncrement: 1,
                        opens: 'center',
                        locale: {{
                            format: 'YYYY-MM-DD HH:mm',
                            applyLabel: "Apply",
                            cancelLabel: "Cancel",
                            customRangeLabel: "Custom Range"
                        }},
                        ranges: {{
                            'Last Hour': [moment().subtract(1, 'hours'), moment()],
                            'Last 6 Hours': [moment().subtract(6, 'hours'), moment()],
                            'Last 24 Hours': [moment().subtract(1, 'days'), moment()],
                            'Last 7 Days': [moment().subtract(7, 'days'), moment()],
                            'Last 30 Days': [moment().subtract(30, 'days'), moment()],
                            'All Time': [moment(minDate), moment(maxDate)]
                        }}
                    }});

                    // Update the plots when the date range changes
                    $('#daterange').on('apply.daterangepicker', function(ev, picker) {{
                        console.log('Date range changed:', picker.startDate.toISOString(), picker.endDate.toISOString());
                        updatePlotRange(picker.startDate.toISOString(), picker.endDate.toISOString());
                    }});

                    // Open the commit URL when a chart point is clicked
                    ['size-chart', 'lines-chart', 'tokens-chart'].forEach(chartId => {{
                        const chart = document.getElementById(chartId);
                        if (chart) {{
                            chart.on('plotly_click', function(data) {{
                                const point = data.points[0];
                                if (point.customdata && point.customdata[1]) {{
                                    window.open(point.customdata[1], '_blank');
                                }}
                            }});
                        }}
                    }});

                    // Debug logging for chart initialization
                    console.log('Size Chart:', document.getElementById('size-chart'));
                    console.log('Lines Chart:', document.getElementById('lines-chart'));
                    console.log('Tokens Chart:', document.getElementById('tokens-chart'));
                }});
            </script>
        </body>
        </html>
        """

        # Write the dashboard
        dashboard_path = output_dir / "dashboard.html"
        with open(dashboard_path, "w") as f:
            f.write(dashboard_html)

        # Generate a summary from whichever metrics are available
        latest_data = {}

        if not df_size.empty:
            latest = df_size.iloc[-1]
            previous = df_size.iloc[-2] if len(df_size) > 1 else latest
            size_change = float(latest['total_size_mb'] - previous['total_size_mb'])
            latest_data.update({
                'timestamp': latest['timestamp'].isoformat(),
                'commit_hash': latest['commit_hash'],
                'commit_url': latest['commit_url'],
                'total_size_mb': float(latest['total_size_mb']),
                'size_change_mb': size_change,
                'packages': latest.get('packages', [])
            })

        if not df_lines.empty:
            latest = df_lines.iloc[-1]
            previous = df_lines.iloc[-2] if len(df_lines) > 1 else latest
            linecount_change = int(latest['total_lines'] - previous['total_lines'])
            if not latest_data:  # Only add timestamp and commit info if not already added
                latest_data.update({
                    'timestamp': latest['timestamp'].isoformat(),
                    'commit_hash': latest['commit_hash'],
                    'commit_url': latest['commit_url'],
                })
            latest_data.update({
                'total_lines': int(latest['total_lines']),
                'linecount_change': linecount_change
            })

        if not df_benchmark.empty:
            latest = df_benchmark.iloc[-1]
            previous = df_benchmark.iloc[-2] if len(df_benchmark) > 1 else latest
            tokens_change = float(latest['tokens_per_second'] - previous['tokens_per_second'])
            if not latest_data:  # Only add timestamp and commit info if not already added
                latest_data.update({
                    'timestamp': latest['timestamp'].isoformat(),
                    'commit_hash': latest['commit_hash'],
                    'commit_url': latest['commit_url'],
                })
            latest_data.update({
                'tokens_per_second': float(latest['tokens_per_second']),
                'tokens_change': tokens_change
            })

        if latest_data:
            with open(output_dir / 'latest_data.json', 'w') as f:
                json.dump(latest_data, f, indent=2)
            self._print_summary(latest_data)
            self.logger.info(f"Report generated in {output_dir}")
            return str(output_dir)

        return None

    def _print_summary(self, latest_data: Dict):
        print("\n=== Package Size Summary ===")
        print(f"Timestamp: {latest_data['timestamp']}")
        print(f"Commit: {latest_data['commit_hash'][:7]}")

        if 'total_size_mb' in latest_data:
            print(f"Total Size: {latest_data['total_size_mb']:.2f}MB")
            change = latest_data['size_change_mb']
            change_symbol = "↓" if change <= 0 else "↑"
            print(f"Change: {change_symbol} {abs(change):.2f}MB")

            if latest_data.get('packages'):
                print("\nTop 5 Largest Packages:")
                sorted_packages = sorted(latest_data['packages'], key=lambda x: x['size_mb'], reverse=True)
                for pkg in sorted_packages[:5]:
                    print(f"- {pkg['name']}: {pkg['size_mb']:.2f}MB")

        if 'total_lines' in latest_data:
            print("\nLine Count Stats:")
            print(f"Total Lines: {latest_data['total_lines']:,}")
            change = latest_data['linecount_change']
            change_symbol = "↓" if change <= 0 else "↑"
            print(f"Change: {change_symbol} {abs(change):,}")

        if 'tokens_per_second' in latest_data:
            print("\nBenchmark Stats:")
            print(f"Tokens per Second: {latest_data['tokens_per_second']:.2f}")
            if 'time_to_first_token' in latest_data:
                print(f"Time to First Token: {latest_data['time_to_first_token']:.3f}s")

        print("\n")

    def _calculate_data_hash(self, data: List[Dict]) -> str:
        """Calculate a fingerprint of the data to detect changes."""
        return str(hash(str(sorted([
            (d.get('commit_hash'), d.get('timestamp'))
            for d in data
        ]))))
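
    # Note: Python's built-in hash() for strings is randomized per interpreter
    # run (PYTHONHASHSEED), so this fingerprint is only stable within a single
    # process; it suits in-process change detection between polls, not
    # persistence across restarts.
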
    def _play_sound(self, sound_key: str):
        """Play a specific notification sound using pygame."""
        try:
            sound_path = self.sounds.get(sound_key)
            if sound_path and sound_path.exists():
                sound = pygame.mixer.Sound(str(sound_path))
                sound.play()
                # Block until the sound finishes playing
                pygame.time.wait(int(sound.get_length() * 1000))
            else:
                self.logger.warning(f"Sound file not found: {sound_key} at {sound_path}")
        except Exception as e:
            self.logger.error(f"Failed to play sound {sound_key}: {e}")

    def _check_metrics_changes(self, current_data: List[Dict], previous_data: List[Dict]):
        # Sort both datasets by timestamp, most recent first
        def sort_by_timestamp(data):
            return sorted(
                data,
                key=lambda x: x.get('timestamp', ''),
                reverse=True
            )

        current_data = sort_by_timestamp(current_data)
        previous_data = sort_by_timestamp(previous_data)

        # Helper to find the latest entry carrying a specific metric
        def find_latest_with_metric(data: List[Dict], metric: str) -> Optional[Dict]:
            return next((d for d in data if metric in d), None)

        # Check line count changes
        current_lines = find_latest_with_metric(current_data, 'total_lines')
        previous_lines = find_latest_with_metric(previous_data, 'total_lines')

        if current_lines and previous_lines:
            diff = current_lines['total_lines'] - previous_lines['total_lines']
            self.logger.debug(f"Lines of code diff: {diff}")
            if diff > 0:
                self.logger.info(f"Lines of code increased by {diff:,}")
                self._play_sound('lines_up')
            elif diff < 0:
                self.logger.info(f"Lines of code decreased by {abs(diff):,}")
                self._play_sound('lines_down')
        else:
            self.logger.debug("No lines of code data found")

        # Check tokens per second changes
        current_tokens = find_latest_with_metric(current_data, 'tokens_per_second')
        previous_tokens = find_latest_with_metric(previous_data, 'tokens_per_second')

        if current_tokens and previous_tokens:
            diff = current_tokens['tokens_per_second'] - previous_tokens['tokens_per_second']
            self.logger.debug(f"Tokens per second diff: {diff}")
            if diff > 0:
                self.logger.info(f"Tokens per second increased by {diff:.2f}")
                self._play_sound('tokens_up')
            elif diff < 0:
                self.logger.info(f"Tokens per second decreased by {abs(diff):.2f}")
                self._play_sound('tokens_down')
        else:
            self.logger.debug("No tokens per second data found")

        # Check package size changes
        current_size = find_latest_with_metric(current_data, 'total_size_mb')
        previous_size = find_latest_with_metric(previous_data, 'total_size_mb')

        if current_size and previous_size:
            diff = current_size['total_size_mb'] - previous_size['total_size_mb']
            self.logger.debug(f"Package size diff: {diff:.2f}MB")
            if diff > 0:
                self.logger.info(f"Package size increased by {diff:.2f}MB")
                self._play_sound('size_up')
            elif diff < 0:
                self.logger.info(f"Package size decreased by {abs(diff):.2f}MB")
                self._play_sound('size_down')
        else:
            self.logger.debug("No package size data found")

    async def run_dashboard(self, update_interval: int = 10):
        """Run the dashboard with periodic updates."""
        try:
            update_interval = float(update_interval)
            self.logger.debug(f"Update interval type: {type(update_interval)}, value: {update_interval}")
        except ValueError:
            self.logger.error(f"Failed to convert update_interval to float: {update_interval}")
            raise

        self.logger.info(f"Starting real-time dashboard with {update_interval}s updates")
        previous_data = None

        while True:
            try:
                start_time = time.time()

                # Collect new data
                current_data = await self.collect_data()
                if not current_data:
                    self.logger.warning("No data collected")
                    await asyncio.sleep(update_interval)
                    continue

                # Generate the report
                report_path = self.generate_report(current_data)
                if report_path:
                    self.logger.info(
                        f"Dashboard updated at {datetime.now().strftime('%H:%M:%S')}"
                    )

                self.logger.debug(f"Current data points: {len(current_data)}")
                self.logger.debug(f"Previous data points: {len(previous_data) if previous_data else 'None'}")

                if previous_data:
                    # Check for metric changes and play the appropriate sounds
                    self.logger.debug(
                        f"Checking metrics changes between {len(current_data)} current "
                        f"and {len(previous_data)} previous data points"
                    )
                    self._check_metrics_changes(current_data, previous_data)

                # Update previous data (copy to prevent reference issues)
                previous_data = current_data.copy()

                # Sleep for the remainder of the update interval
                elapsed = time.time() - start_time
                sleep_time = max(0.0, update_interval - elapsed)
                await asyncio.sleep(sleep_time)
            except Exception as e:
                self.logger.error(f"Error in dashboard update loop: {e}", exc_info=True)
                if self.debug:
                    raise
                await asyncio.sleep(update_interval)


async def main():
    token = os.getenv("CIRCLECI_TOKEN")
    project_slug = os.getenv("CIRCLECI_PROJECT_SLUG")
    debug = os.getenv("DEBUG", "").lower() in ("true", "1", "yes")

    # Get the update interval from the environment or fall back to the default
    try:
        update_interval = float(os.getenv("UPDATE_INTERVAL", "10"))
    except ValueError:
        print(f"Error converting UPDATE_INTERVAL to float: {os.getenv('UPDATE_INTERVAL')}")
        update_interval = 10.0

    if not token or not project_slug:
        print("Error: Please set the CIRCLECI_TOKEN and CIRCLECI_PROJECT_SLUG environment variables")
        return

    tracker = PackageSizeTracker(token, project_slug, debug)

    try:
        await tracker.run_dashboard(update_interval)
    except KeyboardInterrupt:
        print("\nDashboard stopped by user")
    except Exception as e:
        logging.error(f"Error: {str(e)}", exc_info=True)
        if debug:
            raise


if __name__ == "__main__":
    asyncio.run(main())
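
# Usage sketch (illustrative values, not real credentials; slug format assumed
# to be CircleCI's "vcs/org/repo"):
#
#   export CIRCLECI_TOKEN="<personal-api-token>"
#   export CIRCLECI_PROJECT_SLUG="gh/example-org/example-repo"
#   export UPDATE_INTERVAL=30   # optional, seconds between refreshes (default 10)
#   python dashboard.py
#
# The dashboard is written to reports/dashboard.html, alongside
# reports/latest_data.json holding the most recent metrics.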