dashboard.py 46 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101
  1. import os
  2. import json
  3. import logging
  4. import asyncio
  5. import aiohttp
  6. import pandas as pd
  7. import plotly.express as px
  8. from typing import List, Dict, Optional
  9. from pathlib import Path
  10. from plotly.subplots import make_subplots
  11. import plotly.graph_objects as go
  12. import time
  13. import pygame.mixer
  14. from datetime import datetime
  15. class AsyncCircleCIClient:
  16. def __init__(self, token: str, project_slug: str):
  17. self.token = token
  18. self.project_slug = project_slug
  19. self.base_url = "https://circleci.com/api/v2"
  20. self.headers = {
  21. "Circle-Token": token,
  22. "Accept": "application/json"
  23. }
  24. self.logger = logging.getLogger("CircleCI")
  25. async def get_json(self, session: aiohttp.ClientSession, url: str, params: Dict = None) -> Dict:
  26. async with session.get(url, params=params) as response:
  27. response.raise_for_status()
  28. return await response.json()
  29. async def get_recent_pipelines(
  30. self,
  31. session: aiohttp.ClientSession,
  32. org_slug: str = None,
  33. page_token: str = None,
  34. limit: int = None,
  35. branch: str = None
  36. ):
  37. """
  38. Get recent pipelines for a project with pagination support
  39. Args:
  40. session: aiohttp client session
  41. org_slug: Organization slug
  42. page_token: Token for pagination
  43. limit: Maximum number of pipelines to return
  44. branch: Specific branch to fetch pipelines from
  45. """
  46. params = {
  47. "branch": branch,
  48. "page-token": page_token
  49. }
  50. # Remove None values
  51. params = {k: v for k, v in params.items() if v is not None}
  52. url = f"{self.base_url}/project/{self.project_slug}/pipeline"
  53. data = await self.get_json(session, url, params)
  54. pipelines = data["items"]
  55. next_page_token = data.get("next_page_token")
  56. # If there are more pages and we haven't hit the limit, recursively get them
  57. if next_page_token and (limit is None or len(pipelines) < limit):
  58. next_pipelines = await self.get_recent_pipelines(
  59. session,
  60. org_slug,
  61. page_token=next_page_token,
  62. limit=limit,
  63. branch=branch
  64. )
  65. pipelines.extend(next_pipelines)
  66. return pipelines
  67. async def get_workflow_jobs(self, session: aiohttp.ClientSession, pipeline_id: str) -> List[Dict]:
  68. self.logger.debug(f"Fetching workflows for pipeline {pipeline_id}")
  69. url = f"{self.base_url}/pipeline/{pipeline_id}/workflow"
  70. workflows_data = await self.get_json(session, url)
  71. workflows = workflows_data["items"]
  72. # Fetch all jobs for all workflows in parallel
  73. jobs_tasks = []
  74. for workflow in workflows:
  75. url = f"{self.base_url}/workflow/{workflow['id']}/job"
  76. jobs_tasks.append(self.get_json(session, url))
  77. jobs_responses = await asyncio.gather(*jobs_tasks, return_exceptions=True)
  78. all_jobs = []
  79. for jobs_data in jobs_responses:
  80. if isinstance(jobs_data, Exception):
  81. continue
  82. all_jobs.extend(jobs_data["items"])
  83. return all_jobs
  84. async def get_artifacts(self, session: aiohttp.ClientSession, job_number: str) -> List[Dict]:
  85. url = f"{self.base_url}/project/{self.project_slug}/{job_number}/artifacts"
  86. data = await self.get_json(session, url)
  87. return data["items"]
  88. class PackageSizeTracker:
  89. def __init__(self, token: str, project_slug: str, debug: bool = False):
  90. self.setup_logging(debug)
  91. self.client = AsyncCircleCIClient(token, project_slug)
  92. self.logger = logging.getLogger("PackageSizeTracker")
  93. self.last_data_hash = None
  94. self.debug = debug
  95. # Initialize pygame mixer
  96. pygame.mixer.init()
  97. # Sound file paths - can use MP3 files with pygame
  98. sounds_dir = Path(__file__).parent / "sounds"
  99. self.sounds = {
  100. 'lines_up': sounds_dir / "gta5_wasted.mp3",
  101. 'lines_down': sounds_dir / "pokemon_evolve.mp3",
  102. 'tokens_up': sounds_dir / "pokemon_evolve.mp3",
  103. 'tokens_down': sounds_dir / "gta5_wasted.mp3",
  104. 'size_up': sounds_dir / "gta5_wasted.mp3",
  105. 'size_down': sounds_dir / "pokemon_evolve.mp3"
  106. }
  107. def test_sound_effects(self):
  108. """Test all sound effects with a small delay between each"""
  109. self.logger.info("Testing sound effects...")
  110. for sound_key in self.sounds:
  111. self.logger.info(f"Playing {sound_key}")
  112. self._play_sound(sound_key)
  113. time.sleep(1) # Wait 1 second between sounds
  114. def setup_logging(self, debug: bool):
  115. level = logging.DEBUG if debug else logging.INFO
  116. logging.basicConfig(
  117. level=level,
  118. format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
  119. datefmt='%H:%M:%S'
  120. )
  121. def extract_commit_info(self, pipeline: Dict) -> Optional[Dict]:
  122. try:
  123. # Extract from github_app first (preferred)
  124. if 'trigger_parameters' in pipeline and 'github_app' in pipeline['trigger_parameters']:
  125. github_app = pipeline['trigger_parameters']['github_app']
  126. return {
  127. 'commit_hash': github_app.get('checkout_sha'),
  128. 'web_url': f"{github_app.get('repo_url')}/commit/{github_app.get('checkout_sha')}",
  129. 'branch': github_app.get('branch', 'unknown'),
  130. 'author': {
  131. 'name': github_app.get('commit_author_name'),
  132. 'email': github_app.get('commit_author_email'),
  133. 'username': github_app.get('user_username')
  134. },
  135. 'message': github_app.get('commit_message')
  136. }
  137. # Fallback to git parameters
  138. if 'trigger_parameters' in pipeline and 'git' in pipeline['trigger_parameters']:
  139. git = pipeline['trigger_parameters']['git']
  140. return {
  141. 'commit_hash': git.get('checkout_sha'),
  142. 'web_url': f"{git.get('repo_url')}/commit/{git.get('checkout_sha')}",
  143. 'branch': git.get('branch', 'unknown'),
  144. 'author': {
  145. 'name': git.get('commit_author_name'),
  146. 'email': git.get('commit_author_email'),
  147. 'username': git.get('author_login')
  148. },
  149. 'message': git.get('commit_message')
  150. }
  151. self.logger.warning(f"Could not find commit info in pipeline {pipeline['id']}")
  152. return None
  153. except Exception as e:
  154. self.logger.error(f"Error extracting commit info: {str(e)}")
  155. return None
    async def process_pipeline(self, session: aiohttp.ClientSession, pipeline: Dict) -> Optional[Dict]:
        """Turn one pipeline into a single metrics data point.

        Looks for three optional successful jobs (pip-size measurement, line
        count, benchmark runner), downloads their JSON artifacts, and merges
        whatever is available into one dict alongside commit metadata.

        Returns:
            The merged data point, or None when the pipeline has no commit
            info, none of the relevant jobs succeeded, or any error occurs
            (errors are logged, never propagated).
        """
        try:
            commit_info = self.extract_commit_info(pipeline)
            if not commit_info:
                return None

            # Base fields shared by every data point.
            data_point = {
                "commit_hash": commit_info['commit_hash'],
                "commit_url": commit_info['web_url'],
                "timestamp": pipeline.get("created_at", pipeline.get("updated_at")),
                "pipeline_status": pipeline.get("state", "unknown"),
                "branch": commit_info['branch'],
                "author": commit_info['author'],
                "commit_message": commit_info['message']
            }

            jobs = await self.client.get_workflow_jobs(session, pipeline["id"])

            # Get package size data
            size_job = next(
                (j for j in jobs if j["name"] == "measure_pip_sizes" and j["status"] == "success"),
                None
            )

            # Get line count data
            linecount_job = next(
                (j for j in jobs if j["name"] == "check_line_count" and j["status"] == "success"),
                None
            )

            # Get benchmark data from runner job
            benchmark_job = next(
                (j for j in jobs if j["name"] == "runner" and j["status"] == "success"),
                None
            )

            # Return None if no relevant jobs found
            if not size_job and not linecount_job and not benchmark_job:
                self.logger.debug(f"No relevant jobs found for pipeline {pipeline['id']}")
                return None

            # Process benchmark data if available
            if benchmark_job:
                benchmark_artifacts = await self.client.get_artifacts(session, benchmark_job["job_number"])
                benchmark_report = next(
                    (a for a in benchmark_artifacts if a["path"].endswith("benchmark.json")),
                    None
                )
                if benchmark_report:
                    benchmark_data = await self.client.get_json(session, benchmark_report["url"])
                    # NOTE(review): "tokens_per_second" is indexed directly, so a
                    # malformed report raises KeyError and the outer except drops
                    # the whole data point — confirm that is the intended policy.
                    data_point.update({
                        "tokens_per_second": benchmark_data["tokens_per_second"],
                        "time_to_first_token": benchmark_data.get("time_to_first_token", 0)
                    })
                    self.logger.info(
                        f"Processed benchmark data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"tokens/s {benchmark_data['tokens_per_second']:.2f}"
                    )

            # Process size data if available
            if size_job:
                size_artifacts = await self.client.get_artifacts(session, size_job["job_number"])
                size_report = next(
                    (a for a in size_artifacts if a["path"].endswith("pip-sizes.json")),
                    None
                )
                if size_report:
                    size_data = await self.client.get_json(session, size_report["url"])
                    data_point.update({
                        "total_size_mb": size_data["total_size_mb"],
                        "packages": size_data["packages"]
                    })
                    self.logger.info(
                        f"Processed size data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"size {size_data['total_size_mb']:.2f}MB"
                    )

            # Process linecount data if available
            if linecount_job:
                linecount_artifacts = await self.client.get_artifacts(session, linecount_job["job_number"])
                linecount_report = next(
                    (a for a in linecount_artifacts if a["path"].endswith("line-count-snapshot.json")),
                    None
                )
                if linecount_report:
                    linecount_data = await self.client.get_json(session, linecount_report["url"])
                    data_point.update({
                        "total_lines": linecount_data["total_lines"],
                        "total_files": linecount_data["total_files"],
                        "files": linecount_data["files"]
                    })
                    self.logger.info(
                        f"Processed line count data for pipeline {pipeline['id']}: "
                        f"commit {commit_info['commit_hash'][:7]}, "
                        f"lines {linecount_data['total_lines']:,}"
                    )

            return data_point
        except Exception as e:
            self.logger.error(f"Error processing pipeline {pipeline['id']}: {str(e)}")
            return None
  249. async def collect_data(self) -> List[Dict]:
  250. self.logger.info("Starting data collection...")
  251. async with aiohttp.ClientSession(headers=self.client.headers) as session:
  252. # Get pipelines from both main and circleci branches
  253. main_pipelines = await self.client.get_recent_pipelines(
  254. session,
  255. org_slug=self.client.project_slug,
  256. limit=20,
  257. branch="main"
  258. )
  259. circleci_pipelines = await self.client.get_recent_pipelines(
  260. session,
  261. org_slug=self.client.project_slug,
  262. limit=20,
  263. branch="circleci"
  264. )
  265. pipelines = main_pipelines + circleci_pipelines
  266. # Sort pipelines by created_at date
  267. pipelines.sort(key=lambda x: x.get("created_at", x.get("updated_at")), reverse=True)
  268. self.logger.info(f"Found {len(pipelines)} recent pipelines")
  269. # Process all pipelines in parallel
  270. tasks = [self.process_pipeline(session, pipeline) for pipeline in pipelines]
  271. results = await asyncio.gather(*tasks)
  272. # Filter out None results
  273. data_points = [r for r in results if r is not None]
  274. return data_points
  275. def generate_report(self, data: List[Dict], output_dir: str = "reports") -> Optional[str]:
  276. self.logger.info("Generating report...")
  277. if not data:
  278. self.logger.error("No data to generate report from!")
  279. return None
  280. # Get latest pipeline status based on errors
  281. latest_main_pipeline = next((d for d in data if d.get('branch') == 'main'), None)
  282. latest_pipeline_status = 'success' if latest_main_pipeline and not latest_main_pipeline.get('errors') else 'failure'
  283. # Log the pipeline status
  284. if latest_main_pipeline:
  285. self.logger.info(
  286. f"Latest main branch pipeline status: {latest_pipeline_status} "
  287. f"(commit: {latest_main_pipeline['commit_hash'][:7]})"
  288. )
  289. else:
  290. self.logger.warning("No pipeline data found for main branch")
  291. # Convert output_dir to Path object
  292. output_dir = Path(output_dir)
  293. # Create output directory if it doesn't exist
  294. output_dir.mkdir(parents=True, exist_ok=True)
  295. # Create separate dataframes for each metric
  296. df_size = pd.DataFrame([d for d in data if 'total_size_mb' in d])
  297. df_lines = pd.DataFrame([d for d in data if 'total_lines' in d])
  298. df_benchmark = pd.DataFrame([d for d in data if 'tokens_per_second' in d])
  299. # Create a single figure with subplots
  300. fig = make_subplots(
  301. rows=3, cols=2,
  302. subplot_titles=('', 'Package Size', '', 'Line Count', '', 'Tokens per Second'),
  303. vertical_spacing=0.2,
  304. column_widths=[0.2, 0.8],
  305. specs=[[{"type": "indicator"}, {"type": "scatter"}],
  306. [None, {"type": "scatter"}],
  307. [None, {"type": "scatter"}]]
  308. )
  309. # Add package size trace if we have data
  310. if not df_size.empty:
  311. df_size['timestamp'] = pd.to_datetime(df_size['timestamp'])
  312. df_size = df_size.sort_values('timestamp')
  313. fig.add_trace(
  314. go.Scatter(
  315. x=df_size['timestamp'],
  316. y=df_size['total_size_mb'],
  317. mode='lines+markers',
  318. name='Package Size',
  319. customdata=df_size[['commit_hash', 'commit_url']].values,
  320. hovertemplate="<br>".join([
  321. "Size: %{y:.2f}MB",
  322. "Date: %{x}",
  323. "Commit: %{customdata[0]}",
  324. "<extra></extra>"
  325. ])
  326. ),
  327. row=1, col=2
  328. )
  329. fig.update_yaxes(title_text="Size (MB)", row=1, col=2)
  330. # Add line count trace if we have data
  331. if not df_lines.empty:
  332. df_lines['timestamp'] = pd.to_datetime(df_lines['timestamp'])
  333. df_lines = df_lines.sort_values('timestamp')
  334. fig.add_trace(
  335. go.Scatter(
  336. x=df_lines['timestamp'],
  337. y=df_lines['total_lines'],
  338. mode='lines+markers',
  339. name='Line Count',
  340. customdata=df_lines[['commit_hash', 'commit_url']].values,
  341. hovertemplate="<br>".join([
  342. "Lines: %{y:,.0f}",
  343. "Date: %{x}",
  344. "Commit: %{customdata[0]}",
  345. "<extra></extra>"
  346. ])
  347. ),
  348. row=2, col=2
  349. )
  350. fig.update_yaxes(title_text="Total Lines", row=2, col=2)
  351. # Add tokens per second trace if we have data
  352. if not df_benchmark.empty:
  353. df_benchmark['timestamp'] = pd.to_datetime(df_benchmark['timestamp'])
  354. df_benchmark = df_benchmark.sort_values('timestamp')
  355. fig.add_trace(
  356. go.Scatter(
  357. x=df_benchmark['timestamp'],
  358. y=df_benchmark['tokens_per_second'],
  359. mode='lines+markers',
  360. name='Tokens/Second',
  361. customdata=df_benchmark[['commit_hash', 'commit_url']].values,
  362. hovertemplate="<br>".join([
  363. "Tokens/s: %{y:.2f}",
  364. "Date: %{x}",
  365. "Commit: %{customdata[0]}",
  366. "<extra></extra>"
  367. ])
  368. ),
  369. row=3, col=2
  370. )
  371. fig.update_yaxes(title_text="Tokens per Second", row=3, col=2)
  372. # Update layout
  373. fig.update_layout(
  374. height=800,
  375. showlegend=False,
  376. title_text="Package Metrics Dashboard",
  377. title_x=0.5,
  378. plot_bgcolor='white',
  379. paper_bgcolor='white',
  380. font=dict(size=12),
  381. hovermode='x unified'
  382. )
  383. # Update the dashboard HTML with date range picker
  384. dashboard_html = f"""
  385. <html>
  386. <head>
  387. <title>Package Metrics Dashboard</title>
  388. <link rel="stylesheet" type="text/css" href="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.css" />
  389. <style>
  390. body {{
  391. background-color: #f5f6fa;
  392. margin: 0;
  393. padding: 20px;
  394. font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
  395. }}
  396. .date-picker-container {{
  397. background: white;
  398. padding: 15px;
  399. border-radius: 12px;
  400. box-shadow: 0 2px 4px rgba(0,0,0,0.1);
  401. margin: 20px auto;
  402. width: fit-content;
  403. }}
  404. #daterange {{
  405. padding: 8px 12px;
  406. border: 1px solid #ddd;
  407. border-radius: 8px;
  408. font-size: 14px;
  409. width: 300px;
  410. cursor: pointer;
  411. }}
  412. .quick-ranges {{
  413. margin-top: 10px;
  414. display: flex;
  415. gap: 8px;
  416. justify-content: center;
  417. }}
  418. .quick-ranges button {{
  419. padding: 8px 16px;
  420. border: 1px solid #e1e4e8;
  421. border-radius: 8px;
  422. background: white;
  423. cursor: pointer;
  424. font-size: 13px;
  425. transition: all 0.2s ease;
  426. }}
  427. .quick-ranges button:hover {{
  428. background: #f0f0f0;
  429. transform: translateY(-1px);
  430. }}
  431. .dashboard-grid {{
  432. display: grid;
  433. grid-template-columns: 300px 1fr;
  434. gap: 20px;
  435. margin-top: 20px;
  436. }}
  437. .chart-container {{
  438. background: white;
  439. border-radius: 12px;
  440. box-shadow: 0 2px 4px rgba(0,0,0,0.1);
  441. padding: 20px;
  442. height: 350px;
  443. }}
  444. .chart-row {{
  445. display: grid;
  446. grid-template-columns: repeat(2, 1fr);
  447. gap: 20px;
  448. }}
  449. .chart-row-full {{
  450. grid-column: 2 / -1;
  451. }}
  452. .chart-box {{
  453. background: white;
  454. border-radius: 12px;
  455. box-shadow: 0 2px 4px rgba(0,0,0,0.1);
  456. padding: 20px;
  457. display: flex;
  458. flex-direction: column;
  459. }}
  460. .chart-title {{
  461. font-size: 16px;
  462. font-weight: 600;
  463. color: #2c3e50;
  464. margin-bottom: 15px;
  465. padding-bottom: 10px;
  466. border-bottom: 1px solid #eee;
  467. }}
  468. .status-container {{
  469. background: white;
  470. border-radius: 12px;
  471. box-shadow: 0 2px 4px rgba(0,0,0,0.1);
  472. padding: 20px;
  473. height: 350px;
  474. display: flex;
  475. flex-direction: column;
  476. align-items: center;
  477. justify-content: center;
  478. }}
  479. .traffic-light {{
  480. width: 150px;
  481. height: 150px;
  482. border-radius: 50%;
  483. margin: 20px;
  484. box-shadow: 0 0 20px rgba(0,0,0,0.2);
  485. position: relative;
  486. }}
  487. .traffic-light.success {{
  488. background: #2ecc71; /* Bright green */
  489. border: 8px solid #27ae60; /* Darker green border */
  490. }}
  491. .traffic-light.failure {{
  492. background: #e74c3c; /* Bright red */
  493. border: 8px solid #c0392b; /* Darker red border */
  494. }}
  495. .status-text {{
  496. font-size: 24px;
  497. font-weight: bold;
  498. margin-top: 20px;
  499. color: #2c3e50;
  500. }}
  501. /* Override Plotly's default margins */
  502. .js-plotly-plot .plotly {{
  503. margin: 0 !important;
  504. }}
  505. </style>
  506. </head>
  507. <body>
  508. <div class="date-picker-container">
  509. <input type="text" id="daterange" />
  510. <div class="quick-ranges">
  511. <button onclick="setQuickRange('1h')">Last Hour</button>
  512. <button onclick="setQuickRange('6h')">Last 6 Hours</button>
  513. <button onclick="setQuickRange('1d')">Last 24 Hours</button>
  514. <button onclick="setQuickRange('7d')">Last 7 Days</button>
  515. <button onclick="setQuickRange('30d')">Last 30 Days</button>
  516. <button onclick="setQuickRange('all')">All Time</button>
  517. </div>
  518. </div>
  519. <div class="dashboard-grid">
  520. <div class="status-container">
  521. <div class="chart-title">Pipeline Status</div>
  522. <div class="traffic-light {'success' if latest_pipeline_status == 'success' else 'failure'}"></div>
  523. <div class="status-text">
  524. {'✓ Pipeline Passing' if latest_pipeline_status == 'success' else '✗ Pipeline Failing'}
  525. </div>
  526. </div>
  527. <div class="chart-row">
  528. <div class="chart-box">
  529. <div class="chart-title">Package Size</div>
  530. <div id="size-chart"></div>
  531. </div>
  532. <div class="chart-box">
  533. <div class="chart-title">Line Count</div>
  534. <div id="lines-chart"></div>
  535. </div>
  536. </div>
  537. <div class="chart-row chart-row-full">
  538. <div class="chart-box">
  539. <div class="chart-title">Tokens per Second</div>
  540. <div id="tokens-chart"></div>
  541. </div>
  542. </div>
  543. </div>
  544. <script type="text/javascript" src="https://cdn.jsdelivr.net/jquery/latest/jquery.min.js"></script>
  545. <script type="text/javascript" src="https://cdn.jsdelivr.net/momentjs/latest/moment.min.js"></script>
  546. <script type="text/javascript" src="https://cdn.jsdelivr.net/npm/daterangepicker/daterangepicker.min.js"></script>
  547. <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
  548. <script>
  549. let globalMinDate = null;
  550. let globalMaxDate = null;
  551. // Split the original figure into separate charts
  552. const originalData = {fig.to_json()};
  553. function initializeCharts() {{
  554. // Create the size trend chart
  555. const sizeTrace = originalData.data.find(trace => trace.name === 'Package Size');
  556. if (sizeTrace) {{
  557. Plotly.newPlot('size-chart',
  558. [sizeTrace],
  559. {{
  560. showlegend: false,
  561. height: 280,
  562. margin: {{ t: 10, b: 40, l: 50, r: 20 }},
  563. yaxis: {{ title: 'Size (MB)' }},
  564. xaxis: {{
  565. type: 'date',
  566. title: null,
  567. range: [sizeTrace.x[0], sizeTrace.x[sizeTrace.x.length - 1]]
  568. }}
  569. }}
  570. );
  571. }}
  572. // Create the line count chart
  573. const lineTrace = originalData.data.find(trace => trace.name === 'Line Count');
  574. if (lineTrace) {{
  575. Plotly.newPlot('lines-chart',
  576. [lineTrace],
  577. {{
  578. showlegend: false,
  579. height: 280,
  580. margin: {{ t: 10, b: 40, l: 50, r: 20 }},
  581. yaxis: {{ title: 'Total Lines' }},
  582. xaxis: {{
  583. type: 'date',
  584. title: null,
  585. range: [lineTrace.x[0], lineTrace.x[lineTrace.x.length - 1]]
  586. }}
  587. }}
  588. );
  589. }}
  590. // Create the tokens per second chart
  591. const tokensTrace = originalData.data.find(trace => trace.name === 'Tokens/Second');
  592. if (tokensTrace) {{
  593. Plotly.newPlot('tokens-chart',
  594. [tokensTrace],
  595. {{
  596. showlegend: false,
  597. height: 280,
  598. margin: {{ t: 10, b: 40, l: 50, r: 20 }},
  599. yaxis: {{ title: 'Tokens/Second' }},
  600. xaxis: {{
  601. type: 'date',
  602. title: null,
  603. range: [tokensTrace.x[0], tokensTrace.x[tokensTrace.x.length - 1]]
  604. }}
  605. }}
  606. );
  607. }}
  608. // Add debug logs to check axis names
  609. console.log('Size Chart Layout:', document.getElementById('size-chart').layout);
  610. console.log('Lines Chart Layout:', document.getElementById('lines-chart').layout);
  611. console.log('Tokens Chart Layout:', document.getElementById('tokens-chart').layout);
  612. }}
  613. function setQuickRange(range) {{
  614. let start, end = moment();
  615. switch(range) {{
  616. case '1h':
  617. start = moment().subtract(1, 'hours');
  618. break;
  619. case '6h':
  620. start = moment().subtract(6, 'hours');
  621. break;
  622. case '1d':
  623. start = moment().subtract(1, 'days');
  624. break;
  625. case '7d':
  626. start = moment().subtract(7, 'days');
  627. break;
  628. case '30d':
  629. start = moment().subtract(30, 'days');
  630. break;
  631. case 'all':
  632. start = moment(globalMinDate);
  633. end = moment(globalMaxDate);
  634. break;
  635. }}
  636. $('#daterange').data('daterangepicker').setStartDate(start);
  637. $('#daterange').data('daterangepicker').setEndDate(end);
  638. updatePlotRange(start.toISOString(), end.toISOString());
  639. }}
  640. function updatePlotRange(startDate, endDate) {{
  641. console.log('Updating range:', startDate, endDate);
  642. // Get the actual x-axis names from the chart layouts
  643. const sizeChartLayout = document.getElementById('size-chart').layout;
  644. const sizeXAxisName = Object.keys(sizeChartLayout).find(key => key.startsWith('xaxis'));
  645. const linesChartLayout = document.getElementById('lines-chart').layout;
  646. const linesXAxisName = Object.keys(linesChartLayout).find(key => key.startsWith('xaxis'));
  647. const tokensChartLayout = document.getElementById('tokens-chart').layout;
  648. const tokensXAxisName = Object.keys(tokensChartLayout).find(key => key.startsWith('xaxis'));
  649. // Update the ranges
  650. const sizeUpdateLayout = {{}};
  651. sizeUpdateLayout[`{{sizeXAxisName}}.range`] = [startDate, endDate];
  652. const linesUpdateLayout = {{}};
  653. linesUpdateLayout[`{{linesXAxisName}}.range`] = [startDate, endDate];
  654. const tokensUpdateLayout = {{}};
  655. tokensUpdateLayout[`{{tokensXAxisName}}.range`] = [startDate, endDate];
  656. // Update both charts
  657. Plotly.relayout('size-chart', sizeUpdateLayout)
  658. .catch(err => console.error('Error updating size chart:', err));
  659. Plotly.relayout('lines-chart', linesUpdateLayout)
  660. .catch(err => console.error('Error updating lines chart:', err));
  661. Plotly.relayout('tokens-chart', tokensUpdateLayout)
  662. .catch(err => console.error('Error updating tokens chart:', err));
  663. }}
  664. function findDateRange(data) {{
  665. let minDate = null;
  666. let maxDate = null;
  667. data.forEach(trace => {{
  668. if (trace.x && trace.x.length > 0) {{
  669. const dates = trace.x.map(d => new Date(d));
  670. const traceMin = new Date(Math.min(...dates));
  671. const traceMax = new Date(Math.max(...dates));
  672. if (!minDate || traceMin < minDate) minDate = traceMin;
  673. if (!maxDate || traceMax > maxDate) maxDate = traceMax;
  674. }}
  675. }});
  676. return {{ minDate, maxDate }};
  677. }}
  678. // Initialize everything when document is ready
  679. $(document).ready(function() {{
  680. // Initialize charts
  681. initializeCharts();
  682. // Find date range from data
  683. const {{ minDate, maxDate }} = findDateRange(originalData.data);
  684. globalMinDate = minDate;
  685. globalMaxDate = maxDate;
  686. // Initialize daterangepicker
  687. $('#daterange').daterangepicker({{
  688. startDate: minDate,
  689. endDate: maxDate,
  690. minDate: minDate,
  691. maxDate: maxDate,
  692. timePicker: true,
  693. timePicker24Hour: true,
  694. timePickerIncrement: 1,
  695. opens: 'center',
  696. locale: {{
  697. format: 'YYYY-MM-DD HH:mm',
  698. applyLabel: "Apply",
  699. cancelLabel: "Cancel",
  700. customRangeLabel: "Custom Range"
  701. }},
  702. ranges: {{
  703. 'Last Hour': [moment().subtract(1, 'hours'), moment()],
  704. 'Last 6 Hours': [moment().subtract(6, 'hours'), moment()],
  705. 'Last 24 Hours': [moment().subtract(1, 'days'), moment()],
  706. 'Last 7 Days': [moment().subtract(7, 'days'), moment()],
  707. 'Last 30 Days': [moment().subtract(30, 'days'), moment()],
  708. 'All Time': [moment(minDate), moment(maxDate)]
  709. }}
  710. }});
  711. // Update plots when date range changes
  712. $('#daterange').on('apply.daterangepicker', function(ev, picker) {{
  713. console.log('Date range changed:', picker.startDate.toISOString(), picker.endDate.toISOString());
  714. updatePlotRange(picker.startDate.toISOString(), picker.endDate.toISOString());
  715. }});
  716. // Add click handlers for charts
  717. ['size-chart', 'lines-chart', 'tokens-chart'].forEach(chartId => {{
  718. const chart = document.getElementById(chartId);
  719. if (chart) {{
  720. chart.on('plotly_click', function(data) {{
  721. const point = data.points[0];
  722. if (point.customdata && point.customdata[1]) {{
  723. window.open(point.customdata[1], '_blank');
  724. }}
  725. }});
  726. }}
  727. }});
  728. // Add debug logging for chart initialization
  729. console.log('Size Chart:', document.getElementById('size-chart'));
  730. console.log('Lines Chart:', document.getElementById('lines-chart'));
  731. console.log('Tokens Chart:', document.getElementById('tokens-chart'));
  732. }});
  733. </script>
  734. </body>
  735. </html>
  736. """
  737. # Write the dashboard
  738. dashboard_path = output_dir / "dashboard.html"
  739. with open(dashboard_path, "w") as f:
  740. f.write(dashboard_html)
  741. # Generate summary with available metrics
  742. latest_data = {}
  743. if not df_size.empty:
  744. latest = df_size.iloc[-1]
  745. previous = df_size.iloc[-2] if len(df_size) > 1 else latest
  746. size_change = float(latest['total_size_mb'] - previous['total_size_mb'])
  747. latest_data.update({
  748. 'timestamp': latest['timestamp'].isoformat(),
  749. 'commit_hash': latest['commit_hash'],
  750. 'commit_url': latest['commit_url'],
  751. 'total_size_mb': float(latest['total_size_mb']),
  752. 'size_change_mb': size_change,
  753. 'packages': latest.get('packages', [])
  754. })
  755. if not df_lines.empty:
  756. latest = df_lines.iloc[-1]
  757. previous = df_lines.iloc[-2] if len(df_lines) > 1 else latest
  758. linecount_change = int(latest['total_lines'] - previous['total_lines'])
  759. if not latest_data: # Only add timestamp and commit info if not already added
  760. latest_data.update({
  761. 'timestamp': latest['timestamp'].isoformat(),
  762. 'commit_hash': latest['commit_hash'],
  763. 'commit_url': latest['commit_url'],
  764. })
  765. latest_data.update({
  766. 'total_lines': int(latest['total_lines']),
  767. 'linecount_change': linecount_change
  768. })
  769. if not df_benchmark.empty:
  770. latest = df_benchmark.iloc[-1]
  771. previous = df_benchmark.iloc[-2] if len(df_benchmark) > 1 else latest
  772. tokens_change = float(latest['tokens_per_second'] - previous['tokens_per_second'])
  773. if not latest_data: # Only add timestamp and commit info if not already added
  774. latest_data.update({
  775. 'timestamp': latest['timestamp'].isoformat(),
  776. 'commit_hash': latest['commit_hash'],
  777. 'commit_url': latest['commit_url'],
  778. })
  779. latest_data.update({
  780. 'tokens_per_second': float(latest['tokens_per_second']),
  781. 'tokens_change': tokens_change
  782. })
  783. if latest_data:
  784. with open(output_dir / 'latest_data.json', 'w') as f:
  785. json.dump(latest_data, f, indent=2)
  786. self._print_summary(latest_data)
  787. self.logger.info(f"Report generated in {output_dir}")
  788. return str(output_dir)
  789. return None
  790. def _print_summary(self, latest_data: Dict):
  791. print("\n=== Package Size Summary ===")
  792. print(f"Timestamp: {latest_data['timestamp']}")
  793. print(f"Commit: {latest_data['commit_hash'][:7]}")
  794. if 'total_size_mb' in latest_data:
  795. print(f"Total Size: {latest_data['total_size_mb']:.2f}MB")
  796. change = latest_data['size_change_mb']
  797. change_symbol = "↓" if change <= 0 else "↑"
  798. print(f"Change: {change_symbol} {abs(change):.2f}MB")
  799. if latest_data.get('packages'):
  800. print("\nTop 5 Largest Packages:")
  801. sorted_packages = sorted(latest_data['packages'], key=lambda x: x['size_mb'], reverse=True)
  802. for pkg in sorted_packages[:5]:
  803. print(f"- {pkg['name']}: {pkg['size_mb']:.2f}MB")
  804. if 'total_lines' in latest_data:
  805. print("\nLine Count Stats:")
  806. print(f"Total Lines: {latest_data['total_lines']:,}")
  807. change = latest_data['linecount_change']
  808. change_symbol = "↓" if change <= 0 else "↑"
  809. print(f"Change: {change_symbol} {abs(change):,}")
  810. if 'tokens_per_second' in latest_data:
  811. print("\nBenchmark Stats:")
  812. print(f"Tokens per Second: {latest_data['tokens_per_second']:.2f}")
  813. if 'time_to_first_token' in latest_data:
  814. print(f"Time to First Token: {latest_data['time_to_first_token']:.3f}s")
  815. print("\n")
  816. def _calculate_data_hash(self, data: List[Dict]) -> str:
  817. """Calculate a hash of the data to detect changes"""
  818. return hash(str(sorted([
  819. (d.get('commit_hash'), d.get('timestamp'))
  820. for d in data
  821. ])))
  822. def _play_sound(self, sound_key: str):
  823. """Play a specific notification sound using pygame"""
  824. try:
  825. sound_path = self.sounds.get(sound_key)
  826. if sound_path and sound_path.exists():
  827. sound = pygame.mixer.Sound(str(sound_path))
  828. sound.play()
  829. # Wait for the sound to finish playing
  830. pygame.time.wait(int(sound.get_length() * 1000))
  831. else:
  832. self.logger.warning(f"Sound file not found: {sound_key} at {sound_path}")
  833. except Exception as e:
  834. self.logger.error(f"Failed to play sound {sound_key}: {e}")
  835. def _check_metrics_changes(self, current_data: List[Dict], previous_data: List[Dict]):
  836. # Sort data by timestamp in descending order (most recent first)
  837. def sort_by_timestamp(data):
  838. return sorted(
  839. data,
  840. key=lambda x: x.get('timestamp', ''),
  841. reverse=True # Most recent first
  842. )
  843. current_data = sort_by_timestamp(current_data)
  844. previous_data = sort_by_timestamp(previous_data)
  845. # Helper to find latest entry with a specific metric
  846. def find_latest_with_metric(data: List[Dict], metric: str) -> Optional[Dict]:
  847. return next((d for d in data if metric in d), None)
  848. # Check line count changes
  849. current_lines = find_latest_with_metric(current_data, 'total_lines')
  850. previous_lines = find_latest_with_metric(previous_data, 'total_lines')
  851. if current_lines and previous_lines:
  852. diff = current_lines['total_lines'] - previous_lines['total_lines']
  853. self.logger.debug(f"Lines of code diff: {diff}")
  854. if diff > 0:
  855. self.logger.info(f"Lines of code increased by {diff:,}")
  856. self._play_sound('lines_up')
  857. elif diff < 0:
  858. self.logger.info(f"Lines of code decreased by {abs(diff):,}")
  859. self._play_sound('lines_down')
  860. else:
  861. self.logger.debug("No lines of code data found")
  862. # Check tokens per second changes
  863. current_tokens = find_latest_with_metric(current_data, 'tokens_per_second')
  864. previous_tokens = find_latest_with_metric(previous_data, 'tokens_per_second')
  865. if current_tokens and previous_tokens:
  866. diff = current_tokens['tokens_per_second'] - previous_tokens['tokens_per_second']
  867. self.logger.debug(f"Tokens per second diff: {diff}")
  868. if diff > 0:
  869. self.logger.info(f"Tokens per second increased by {diff:.2f}")
  870. self._play_sound('tokens_up')
  871. elif diff < 0:
  872. self.logger.info(f"Tokens per second decreased by {abs(diff):.2f}")
  873. self._play_sound('tokens_down')
  874. else:
  875. self.logger.debug("No tokens per second data found")
  876. # Check package size changes
  877. current_size = find_latest_with_metric(current_data, 'total_size_mb')
  878. previous_size = find_latest_with_metric(previous_data, 'total_size_mb')
  879. if current_size and previous_size:
  880. diff = current_size['total_size_mb'] - previous_size['total_size_mb']
  881. self.logger.debug(f"Package size diff: {diff:.2f}MB")
  882. if diff > 0:
  883. self.logger.info(f"Package size increased by {diff:.2f}MB")
  884. self._play_sound('size_up')
  885. elif diff < 0:
  886. self.logger.info(f"Package size decreased by {abs(diff):.2f}MB")
  887. self._play_sound('size_down')
  888. else:
  889. self.logger.debug("No package size data found")
  890. async def run_dashboard(self, update_interval: int = 10):
  891. """Run the dashboard with periodic updates"""
  892. try:
  893. update_interval = float(update_interval)
  894. self.logger.debug(f"Update interval type: {type(update_interval)}, value: {update_interval}")
  895. except ValueError as e:
  896. self.logger.error(f"Failed to convert update_interval to float: {update_interval}")
  897. raise
  898. self.logger.info(f"Starting real-time dashboard with {update_interval}s updates")
  899. previous_data = None
  900. while True:
  901. try:
  902. start_time = time.time()
  903. # Collect new data
  904. current_data = await self.collect_data()
  905. if not current_data:
  906. self.logger.warning("No data collected")
  907. await asyncio.sleep(update_interval)
  908. continue
  909. # Generate report
  910. report_path = self.generate_report(current_data)
  911. if report_path:
  912. self.logger.info(
  913. f"Dashboard updated at {datetime.now().strftime('%H:%M:%S')}"
  914. )
  915. print("Curr:", len(current_data))
  916. print("Prev:", len(previous_data) if previous_data else "None")
  917. if previous_data:
  918. # Check for metric changes and play appropriate sounds
  919. self.logger.debug(f"Checking metrics changes between {len(current_data)} current and {len(previous_data)} previous data points")
  920. self._check_metrics_changes(current_data, previous_data)
  921. # Update previous data
  922. previous_data = current_data.copy() # Make a copy to prevent reference issues
  923. # Calculate sleep time
  924. elapsed = float(time.time() - start_time)
  925. sleep_time = max(0.0, update_interval - elapsed)
  926. await asyncio.sleep(sleep_time)
  927. except Exception as e:
  928. self.logger.error(f"Error in dashboard update loop: {e}", exc_info=True)
  929. if self.debug:
  930. raise
  931. await asyncio.sleep(update_interval)
  932. async def main():
  933. token = os.getenv("CIRCLECI_TOKEN")
  934. project_slug = os.getenv("CIRCLECI_PROJECT_SLUG")
  935. debug = os.getenv("DEBUG", "").lower() in ("true", "1", "yes")
  936. try:
  937. # Get update interval from environment or use default
  938. update_interval = float(os.getenv("UPDATE_INTERVAL", "10"))
  939. print(f"Update interval type: {type(update_interval)}, value: {update_interval}") # Debug print
  940. except ValueError as e:
  941. print(f"Error converting UPDATE_INTERVAL to float: {os.getenv('UPDATE_INTERVAL')}")
  942. update_interval = 10.0
  943. if not token or not project_slug:
  944. print("Error: Please set CIRCLECI_TOKEN and CIRCLECI_PROJECT_SLUG environment variables")
  945. return
  946. tracker = PackageSizeTracker(token, project_slug, debug)
  947. try:
  948. await tracker.run_dashboard(update_interval)
  949. except KeyboardInterrupt:
  950. print("\nDashboard stopped by user")
  951. except Exception as e:
  952. logging.error(f"Error: {str(e)}", exc_info=True)
  953. if debug:
  954. raise
# Script entry point: start the async dashboard loop.
# KeyboardInterrupt is handled inside main(), so Ctrl-C exits cleanly.
if __name__ == "__main__":
    asyncio.run(main())