react-github-timeline
3D visualization of GitHub repository evolution over time using React Three Fiber
1,655 lines (1,653 loc) • 83.9 kB
JavaScript
var Se = Object.defineProperty;
var ve = (o, t, e) => t in o ? Se(o, t, { enumerable: !0, configurable: !0, writable: !0, value: e }) : o[t] = e;
var T = (o, t, e) => ve(o, typeof t != "symbol" ? t + "" : t, e);
import { jsx as l, jsxs as f, Fragment as we } from "react/jsx-runtime";
import { forwardRef as X, createElement as W, useRef as _, useEffect as A, useReducer as Ce, useCallback as D, useState as z, useImperativeHandle as be, memo as ie } from "react";
import { Sphere as xe, Text as Te, OrbitControls as Ee } from "@react-three/drei";
import { useFrame as Ne, Canvas as Re } from "@react-three/fiber";
import * as I from "three";
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Me = (o) => o.replace(/([a-z0-9])([A-Z])/g, "$1-$2").toLowerCase(), _e = (o) => o.replace(
/^([A-Z])|[\s-_]+(\w)/g,
(t, e, s) => s ? s.toUpperCase() : e.toLowerCase()
), te = (o) => {
const t = _e(o);
return t.charAt(0).toUpperCase() + t.slice(1);
}, ne = (...o) => o.filter((t, e, s) => !!t && t.trim() !== "" && s.indexOf(t) === e).join(" ").trim(), ke = (o) => {
for (const t in o)
if (t.startsWith("aria-") || t === "role" || t === "title")
return !0;
};
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
var Ae = {
xmlns: "http://www.w3.org/2000/svg",
width: 24,
height: 24,
viewBox: "0 0 24 24",
fill: "none",
stroke: "currentColor",
strokeWidth: 2,
strokeLinecap: "round",
strokeLinejoin: "round"
};
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
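/**
 * Base icon component (minified as `Le`): renders an <svg> with the lucide
 * default attributes, scales strokeWidth when absoluteStrokeWidth is set,
 * and adds aria-hidden when no children or accessible label are provided.
 */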
const Le = X(
({
color: o = "currentColor",
size: t = 24,
strokeWidth: e = 2,
absoluteStrokeWidth: s,
className: a = "",
children: r,
iconNode: i,
...n
}, m) => W(
"svg",
{
ref: m,
...Ae,
width: t,
height: t,
stroke: o,
strokeWidth: s ? Number(e) * 24 / Number(t) : e,
className: ne("lucide", a),
...!r && !ke(n) && { "aria-hidden": "true" },
...n
},
[
...i.map(([d, c]) => W(d, c)),
...Array.isArray(r) ? r : [r]
]
)
);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
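/**
 * Icon factory (minified as `N`): wraps the base icon component with
 * kebab-cased lucide class names and sets a PascalCase displayName.
 */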
const N = (o, t) => {
const e = X(
({ className: s, ...a }, r) => W(Le, {
ref: r,
iconNode: t,
className: ne(
`lucide-${Me(te(o))}`,
`lucide-${o}`,
s
),
...a
})
);
return e.displayName = te(o), e;
};
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const ze = [
["path", { d: "m12 19-7-7 7-7", key: "1l729n" }],
["path", { d: "M19 12H5", key: "x3x0zl" }]
], Ie = N("arrow-left", ze);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const De = [["path", { d: "m6 9 6 6 6-6", key: "qrunsl" }]], $e = N("chevron-down", De);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Oe = [["path", { d: "m18 15-6-6-6 6", key: "153udz" }]], Fe = N("chevron-up", Oe);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Pe = [
["path", { d: "m11 17-5-5 5-5", key: "13zhaf" }],
["path", { d: "m18 17-5-5 5-5", key: "h8a8et" }]
], Ge = N("chevrons-left", Pe);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Ue = [
["path", { d: "m6 17 5-5-5-5", key: "xnjwq" }],
["path", { d: "m13 17 5-5-5-5", key: "17xmmf" }]
], He = N("chevrons-right", Ue);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const je = [
["circle", { cx: "12", cy: "12", r: "10", key: "1mglay" }],
["line", { x1: "12", x2: "12", y1: "8", y2: "12", key: "1pkeuh" }],
["line", { x1: "12", x2: "12.01", y1: "16", y2: "16", key: "4dfq90" }]
], se = N("circle-alert", je);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const qe = [
["path", { d: "M21.801 10A10 10 0 1 1 17 3.335", key: "yps3ct" }],
["path", { d: "m9 11 3 3L22 4", key: "1pflzl" }]
], We = N("circle-check-big", qe);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Be = [
[
"path",
{ d: "M12 6a2 2 0 0 1 3.414-1.414l6 6a2 2 0 0 1 0 2.828l-6 6A2 2 0 0 1 12 18z", key: "b19h5q" }
],
[
"path",
{ d: "M2 6a2 2 0 0 1 3.414-1.414l6 6a2 2 0 0 1 0 2.828l-6 6A2 2 0 0 1 2 18z", key: "h7h5ge" }
]
], Ke = N("fast-forward", Be);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Xe = [["path", { d: "M21 12a9 9 0 1 1-6.219-8.56", key: "13zald" }]], Ve = N("loader-circle", Xe);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Ye = [
["rect", { x: "14", y: "3", width: "5", height: "18", rx: "1", key: "kaeet6" }],
["rect", { x: "5", y: "3", width: "5", height: "18", rx: "1", key: "1wsw3u" }]
], Je = N("pause", Ye);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const Ze = [
[
"path",
{
d: "M5 5a2 2 0 0 1 3.008-1.728l11.997 6.998a2 2 0 0 1 .003 3.458l-12 7A2 2 0 0 1 5 19z",
key: "10ikf1"
}
]
], Qe = N("play", Ze);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const et = [
["path", { d: "M3 12a9 9 0 0 1 9-9 9.75 9.75 0 0 1 6.74 2.74L21 8", key: "v9h5vc" }],
["path", { d: "M21 3v5h-5", key: "1q7to0" }],
["path", { d: "M21 12a9 9 0 0 1-9 9 9.75 9.75 0 0 1-6.74-2.74L3 16", key: "3uifl3" }],
["path", { d: "M8 16H3v5", key: "1cv678" }]
], tt = N("refresh-cw", et);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const st = [
[
"path",
{ d: "M12 6a2 2 0 0 0-3.414-1.414l-6 6a2 2 0 0 0 0 2.828l6 6A2 2 0 0 0 12 18z", key: "2a1g8i" }
],
[
"path",
{ d: "M22 6a2 2 0 0 0-3.414-1.414l-6 6a2 2 0 0 0 0 2.828l6 6A2 2 0 0 0 22 18z", key: "rg3s36" }
]
], rt = N("rewind", st);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const at = [
["path", { d: "M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8", key: "1357e3" }],
["path", { d: "M3 3v5h5", key: "1xhq8a" }]
], ot = N("rotate-ccw", at);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const it = [
["path", { d: "M21 4v16", key: "7j8fe9" }],
[
"path",
{
d: "M6.029 4.285A2 2 0 0 0 3 6v12a2 2 0 0 0 3.029 1.715l9.997-5.998a2 2 0 0 0 .003-3.432z",
key: "zs4d6"
}
]
], nt = N("skip-forward", it);
/**
* @license lucide-react v0.552.0 - ISC
*
* This source code is licensed under the ISC license.
* See the LICENSE file in the root directory of this source tree.
*/
const ct = [
[
"path",
{
d: "M17.971 4.285A2 2 0 0 1 21 6v12a2 2 0 0 1-3.029 1.715l-9.997-5.998a2 2 0 0 1-.003-3.432z",
key: "15892j"
}
],
["path", { d: "M3 20V4", key: "1ptbpl" }]
], lt = N("skip-back", ct), ht = !1;
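/**
 * Playback timer hook (minified as `mt`): while playing and commits exist,
 * ticks every 100ms, moving the current time by 100 * playbackSpeed in the
 * chosen direction and pausing playback when the start or end of the time
 * range is reached.
 */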
function mt({
isPlaying: o,
playbackSpeed: t,
playbackDirection: e,
timeRange: s,
hasCommits: a,
onTimeChange: r,
onPlayingChange: i
}) {
const n = _(null);
A(() => {
if (o && a) {
const d = 100 * t;
return n.current = setInterval(() => {
r((c) => {
let h;
if (e === "forward") {
if (h = c + d, h >= s.end)
return i(!1), s.end;
} else if (h = c - d, h <= s.start)
return i(!1), s.start;
return h;
});
}, 100), () => {
n.current && clearInterval(n.current);
};
}
}, [
o,
t,
e,
a,
s.start,
s.end,
r,
i
]);
}
const dt = "2025-11-04-v2";
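/**
 * buildFileTree (minified as `B`): turns a flat list of { path, size } file
 * entries into tree nodes, synthesizing a directory node for every ancestor
 * path and a "/" root node whenever any files exist.
 */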
function B(o) {
  console.log(`buildFileTree called with ${o.length} files`);
const t = [], e = /* @__PURE__ */ new Map(), s = /* @__PURE__ */ new Set();
if (o.forEach((a) => {
const r = {
id: a.path,
path: a.path,
name: a.path.split("/").pop() || a.path,
size: a.size,
type: a.type || "file"
};
t.push(r), e.set(a.path, r);
const i = a.path.split("/");
for (let n = 1; n < i.length; n++) {
const m = i.slice(0, n).join("/");
s.add(m);
}
}), s.forEach((a) => {
if (!e.has(a)) {
const r = {
id: a,
path: a,
name: a.split("/").pop() || a,
size: 0,
// Directories have no size
type: "directory"
};
t.push(r), e.set(a, r);
}
}), o.length > 0 && !e.has("/")) {
const a = {
id: "/",
path: "/",
name: "root",
size: 0,
type: "directory"
};
    t.push(a), e.set("/", a), console.log("Created root node");
  } else o.length === 0 ? console.log("No root node: no files") : e.has("/") && console.log("No root node: already exists in pathMap");
return t;
}
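/**
 * buildEdges (minified as `K`): derives parent -> child edges from file
 * paths, linking each file to its containing directory (or to "/") and each
 * directory to its parent, deduplicating directory edges along the way.
 */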
function K(o) {
  console.log(`buildEdges called with ${o.length} files`);
const t = [], e = /* @__PURE__ */ new Set();
o.forEach((a) => {
const r = a.path.split("/");
if (r.length > 1) {
const i = r.slice(0, -1).join("/");
      console.log(`    File edge: ${i} → ${a.path}`), t.push({
source: i,
target: a.path,
type: "parent"
});
for (let n = 0; n < r.length - 1; n++) {
const m = r.slice(0, n + 1).join("/"), d = n > 0 ? r.slice(0, n).join("/") : "", c = `${d || "/"}->${m}`;
e.has(c) || (e.add(c), d === "" ? t.push({
source: "/",
target: m,
type: "parent"
}) : t.push({
source: d,
target: m,
type: "parent"
}));
}
} else
t.push({
source: "/",
target: a.path,
type: "parent"
});
});
const s = t.filter((a) => a.source === "/");
return console.log(
    `  → Created ${t.length} total edges, ${s.length} from root`
), s.length > 0 && console.log(
" Root edge targets:",
s.map((a) => a.target)
), t;
}
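/**
 * Commit snapshot builder (minified as `P`): builds nodes and edges from the
 * tracker's current file state, warns about orphaned nodes that appear in no
 * edge, and returns a { hash, message, author, date, files, edges } record.
 */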
function P(o, t, e, s, a) {
const r = a.getFileData(), i = B(r), n = K(r), m = /* @__PURE__ */ new Set([
...n.map((c) => c.target),
...n.map((c) => c.source)
]), d = i.filter((c) => !m.has(c.id));
return d.length > 0 && (console.warn(
    `⚠️ Commit ${o} has ${d.length} orphaned nodes:`,
d.map((c) => `${c.path} (${c.type})`)
), console.warn(" File data:", r), console.warn(
" Nodes:",
i.map((c) => `${c.path} (${c.type})`)
), console.warn(
" Edges:",
    n.map((c) => `${c.source} → ${c.target}`)
)), {
hash: o,
message: t,
author: e,
date: new Date(s),
files: i,
edges: n
};
}
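/**
 * File state tracker (minified as `G`): keeps a path -> cumulative size map,
 * applies additions/deletions per change set, handles renames and removals,
 * and exposes only files with a positive size for tree building.
 */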
class G {
constructor() {
T(this, "fileState", /* @__PURE__ */ new Map());
}
/**
* Update file state based on PR file changes
*/
updateFromPRFiles(t) {
for (const e of t)
if (e.status === "removed")
this.fileState.delete(e.filename);
else if (e.status === "renamed" && e.previous_filename) {
const s = this.fileState.get(e.previous_filename) || 0;
this.fileState.delete(e.previous_filename), this.fileState.set(
e.filename,
s + e.additions - e.deletions
);
} else {
const s = this.fileState.get(e.filename) || 0;
this.fileState.set(
e.filename,
s + e.additions - e.deletions
);
}
}
/**
* Get current file state as an array of [path, size] tuples
*/
getFileState() {
return Array.from(this.fileState.entries());
}
/**
* Get file data suitable for building file tree
*/
getFileData() {
return this.getFileState().filter(([, t]) => t > 0).map(([t, e]) => ({
path: t,
size: e
}));
}
/**
* Clear all tracked state
*/
clear() {
this.fileState.clear();
}
}
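/**
 * Cloudflare Worker client (minified as `ut`): fetch wrapper for the
 * worker's metadata, cache-status, summary, single-PR, and paginated commit
 * endpoints; pagination is read from the X-Total-Count, X-Has-More,
 * X-Offset, and X-Limit response headers.
 */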
class ut {
constructor(t, e, s) {
T(this, "workerUrl");
T(this, "owner");
T(this, "repo");
this.workerUrl = t, this.owner = e, this.repo = s;
}
/**
* Fetch metadata from Cloudflare Worker (fast, all commits without files)
*/
async fetchMetadata() {
const t = `${this.workerUrl}/api/repo/${this.owner}/${this.repo}/metadata`, e = await fetch(t);
if (!e.ok) {
const a = await e.json().catch(() => ({ error: "Unknown error" }));
throw new Error(
a.error || `Worker request failed: ${e.status}`
);
}
return await e.json();
}
/**
* Fetch cache status from Cloudflare Worker (instant!)
*/
async fetchCacheStatus() {
const t = `${this.workerUrl}/api/repo/${this.owner}/${this.repo}/cache`, e = await fetch(t);
if (!e.ok) {
const r = await e.json().catch(() => ({ error: "Unknown error" }));
throw new Error(
r.error || `Cache status request failed: ${e.status}`
);
}
const s = await e.json();
return {
cache: s.cache,
status: s.status
};
}
/**
* Fetch repo summary from GitHub (fast, just first page)
*/
async fetchRepoSummary() {
const t = `${this.workerUrl}/api/repo/${this.owner}/${this.repo}/summary`, e = await fetch(t);
if (!e.ok) {
const a = await e.json().catch(() => ({ error: "Unknown error" }));
throw new Error(
a.error || `Summary request failed: ${e.status}`
);
}
return { github: (await e.json()).github };
}
/**
* Fetch a single PR with files from Cloudflare Worker (instant from cache!)
*/
async fetchSinglePR(t) {
const e = `${this.workerUrl}/api/repo/${this.owner}/${this.repo}/pr/${t}`, s = await fetch(e);
if (s.status === 404)
return null;
if (!s.ok) {
const r = await s.json().catch(() => ({ error: "Unknown error" }));
throw new Error(
r.error || `Single PR request failed: ${s.status}`
);
}
return await s.json();
}
/**
* Fetch commit data from Cloudflare Worker with pagination
*/
async fetchCommits(t = 0, e = 40) {
const s = `${this.workerUrl}/api/repo/${this.owner}/${this.repo}?offset=${t}&limit=${e}`, a = await fetch(s);
if (!a.ok) {
const c = await a.json().catch(() => ({ error: "Unknown error" }));
throw new Error(
c.error || `Worker request failed: ${a.status}`
);
}
const r = await a.json(), i = Number.parseInt(
a.headers.get("X-Total-Count") || "0",
10
), n = a.headers.get("X-Has-More") === "true", m = Number.parseInt(
a.headers.get("X-Offset") || "0",
10
), d = Number.parseInt(
a.headers.get("X-Limit") || "40",
10
);
return {
commits: r,
totalCount: i,
hasMore: n,
offset: m,
limit: d
};
}
}
console.log(`Loaded fileTreeBuilder version: ${dt}`);
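/**
 * GitHubApiService (minified as `H`; re-exported below): talks to the GitHub
 * REST API directly, tracking rate-limit headers and sleeping 1s between
 * paged requests, or delegates to the worker client when a worker URL is
 * configured. Builds the timeline from merged PRs, falling back to the
 * default branch's commits when no merged PRs exist.
 */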
class H {
constructor(t, e, s) {
T(this, "owner");
T(this, "repo");
T(this, "baseUrl", "https://api.github.com");
T(this, "requestDelay", 1e3);
// 1 second between requests to avoid rate limiting
T(this, "token");
T(this, "lastRateLimit", null);
T(this, "workerService");
const [a, r] = t.split("/");
this.owner = a, this.repo = r, this.token = e, s && (this.workerService = new ut(s, a, r));
}
/**
* Check if we should use the worker for this request
*/
shouldUseWorker() {
return !!this.workerService;
}
/**
* Fetch metadata from Cloudflare Worker (fast, all commits without files)
*/
async fetchMetadata() {
if (!this.workerService)
throw new Error("Worker URL not configured");
return this.workerService.fetchMetadata();
}
/**
* Fetch cache status from Cloudflare Worker (instant!)
*/
async fetchCacheStatus() {
if (!this.workerService)
throw new Error("Worker URL required for cache status");
return this.workerService.fetchCacheStatus();
}
/**
* Fetch repo summary from GitHub (fast, just first page)
*/
async fetchRepoSummary() {
if (!this.workerService)
throw new Error("Worker URL required for summary");
return this.workerService.fetchRepoSummary();
}
/**
* Fetch a single PR with files from Cloudflare Worker (instant from cache!)
*/
async fetchSinglePR(t) {
if (!this.workerService)
throw new Error("Worker URL required for single PR fetch");
return this.workerService.fetchSinglePR(t);
}
/**
* Fetch commit data from Cloudflare Worker
*/
async fetchCommitsFromWorker(t = 0, e = 40) {
if (!this.workerService)
throw new Error("Worker URL not configured");
return this.workerService.fetchCommits(t, e);
}
getRateLimitInfo() {
return this.lastRateLimit;
}
/**
* Fetch repository metadata to get default branch name
*/
async fetchRepoInfo() {
return this.fetchGitHub(
`/repos/${this.owner}/${this.repo}`
);
}
/**
* Sleep for a specified duration
*/
sleep(t) {
return new Promise((e) => setTimeout(e, t));
}
/**
* Make a GitHub API request with error handling
*/
async fetchGitHub(t, e = {}) {
const s = `${this.baseUrl}${t}`, a = {
Accept: "application/vnd.github.v3+json"
};
this.token && (a.Authorization = `Bearer ${this.token}`);
const r = await fetch(s, {
...e,
headers: {
...a,
...e.headers
}
}), i = r.headers.get("X-RateLimit-Remaining"), n = r.headers.get("X-RateLimit-Limit"), m = r.headers.get("X-RateLimit-Reset");
if (i && n && m && (this.lastRateLimit = {
remaining: Number.parseInt(i),
limit: Number.parseInt(n),
resetTime: new Date(Number.parseInt(m) * 1e3)
}), !r.ok) {
if (r.status === 403 && i === "0") {
const d = m ? new Date(Number.parseInt(m) * 1e3) : /* @__PURE__ */ new Date();
throw new Error(
`GitHub API rate limit exceeded. Resets at ${d.toLocaleTimeString()}. Unauthenticated limit is 60/hour. For higher limits, wait or contact the developer.`
);
}
throw r.status === 404 ? new Error(
`Unable to access repository: ${this.owner}/${this.repo}
This could mean:
⢠The repository is private (GitHub returns 404 for private repos)
⢠The repository doesn't exist
⢠The repository name is misspelled
To access private repositories, authentication is required but not currently supported in this demo.`
) : new Error(
`GitHub API error: ${r.status} ${r.statusText}`
);
}
return r.json();
}
/**
* Fetch all merged pull requests for the repository
*/
async fetchMergedPRs(t) {
const e = [];
let s = 1;
const a = 100;
for (t && t({
loaded: 0,
total: -1,
percentage: 0,
message: "Fetching pull requests..."
}); ; ) {
const r = await this.fetchGitHub(
`/repos/${this.owner}/${this.repo}/pulls?state=closed&per_page=${a}&page=${s}&sort=created&direction=asc`
);
if (r.length === 0) break;
const i = r.filter((n) => n.merged_at !== null);
if (e.push(...i), t && t({
loaded: e.length,
total: -1,
// Unknown total
percentage: 0,
message: `Found ${e.length} merged pull requests...`
}), r.length < a) break;
s++, await this.sleep(this.requestDelay);
}
return e;
}
/**
* Fetch file changes for a specific PR
*/
async fetchPRFiles(t) {
const e = [];
let s = 1;
const a = 100;
for (; ; ) {
const r = await this.fetchGitHub(
`/repos/${this.owner}/${this.repo}/pulls/${t}/files?per_page=${a}&page=${s}`
);
if (r.length === 0 || (e.push(...r), r.length < a)) break;
s++, await this.sleep(this.requestDelay);
}
return e;
}
/**
* Load additional commits from worker with pagination
* Returns processed commits and pagination info
*/
async loadMoreCommits(t, e = 40, s = /* @__PURE__ */ new Map(), a, r) {
if (!this.shouldUseWorker())
throw new Error("loadMoreCommits requires worker URL");
r && r({
loaded: 0,
total: -1,
percentage: 0,
message: `Fetching commits ${t}-${t + e}...`
});
const i = await this.fetchCommitsFromWorker(t, e);
r && r({
loaded: i.commits.length,
total: i.commits.length,
percentage: 50,
message: `Processing ${i.commits.length} additional commits...`
});
const n = [], m = new G();
for (const [d, c] of s.entries())
m.updateFromPRFiles([
{
filename: d,
status: "added",
additions: c,
deletions: 0,
changes: c
}
]);
for (let d = 0; d < i.commits.length; d++) {
const c = i.commits[d];
if (r && r({
loaded: d + 1,
total: i.commits.length,
percentage: 50 + Math.round(d / i.commits.length * 50),
message: `Processing commit ${d + 1}/${i.commits.length}`
}), c.files && c.files.length > 0) {
const u = c.files.map((y) => ({
filename: y.filename,
status: y.status,
additions: y.additions,
deletions: y.deletions,
changes: y.changes,
previous_filename: y.previous_filename
}));
m.updateFromPRFiles(u);
}
const h = P(
c.sha.substring(0, 7),
c.commit.message.split(`
`)[0],
c.commit.author.name,
c.commit.author.date,
m
);
n.push(h), a && a(h);
}
return {
commits: n,
hasMore: i.hasMore,
totalCount: i.totalCount
};
}
/**
* Build commit timeline from PRs incrementally
* Calls onCommit callback for each PR processed, allowing progressive rendering
*/
async buildTimelineFromPRsIncremental(t, e, s) {
if (this.shouldUseWorker()) {
e && e({
loaded: 0,
total: -1,
percentage: 0,
message: "Fetching data from cache..."
});
const n = await this.fetchCommitsFromWorker();
console.log("[AUTOLOAD] Initial fetch response:", {
commits: n.commits.length,
totalCount: n.totalCount,
hasMore: n.hasMore
}), e && e({
loaded: n.commits.length,
total: n.totalCount,
percentage: 50,
message: `Loaded ${n.commits.length} commits from cache`
});
const m = [], d = new G();
for (let c = 0; c < n.commits.length; c++) {
const h = n.commits[c];
if (e && e({
loaded: c + 1,
total: n.commits.length,
percentage: 50 + Math.round(c / n.commits.length * 50),
message: `Processing commit ${c + 1}/${n.commits.length}`
}), h.files && h.files.length > 0) {
const y = h.files.map((p) => ({
filename: p.filename,
status: p.status,
additions: p.additions,
deletions: p.deletions,
changes: p.changes,
previous_filename: p.previous_filename
}));
d.updateFromPRFiles(y);
}
const u = P(
h.sha.substring(0, 7),
h.commit.message.split(`
`)[0],
h.commit.author.name,
h.commit.author.date,
d
);
m.push(u), t && t(u), s && (c % 5 === 0 || c === n.commits.length - 1) && s([...m]);
}
return {
commits: m,
hasMore: n.hasMore,
totalCount: n.totalCount
};
}
const a = await this.fetchMergedPRs((n) => {
e && e({
...n,
percentage: 10
});
});
if (a.length === 0)
return this.buildTimelineFromCommits(t, e);
const r = [], i = new G();
for (let n = 0; n < a.length; n++) {
const m = a[n];
e && e({
loaded: n + 1,
total: a.length,
percentage: 10 + Math.round(n / a.length * 90),
message: `Processing PR #${m.number}: ${m.title}`
});
const d = m.files || await this.fetchPRFiles(m.number);
i.updateFromPRFiles(d);
const c = P(
m.merge_commit_sha ? m.merge_commit_sha.substring(0, 7) : `pr-${m.number}`,
m.title,
m.user.login,
m.merged_at || new Date(Date.now()).toISOString(),
i
);
r.push(c), t && t(c), s && (n % 5 === 0 || n === a.length - 1) && s([...r]), n < a.length - 1 && await this.sleep(this.requestDelay);
}
return { commits: r };
}
/**
* Build commit timeline from commits directly (when no merged PRs exist)
* This fetches commits from the default branch and builds file state incrementally
*/
async buildTimelineFromCommits(t, e) {
const a = (await this.fetchRepoInfo()).default_branch;
e && e({
loaded: 0,
total: -1,
percentage: 0,
message: `Fetching commits from ${a} branch...`
});
const r = [];
let i = 1;
const n = 100, m = 100;
for (; r.length < m; ) {
const h = await this.fetchGitHub(
`/repos/${this.owner}/${this.repo}/commits?sha=${a}&per_page=${n}&page=${i}`
);
if (h.length === 0 || (r.push(...h.slice(0, m - r.length)), h.length < n)) break;
i++, await this.sleep(this.requestDelay);
}
if (r.length === 0)
throw new Error("No commits found. Repository may be empty or private.");
const d = [], c = new G();
for (let h = 0; h < r.length; h++) {
const u = r[h];
e && e({
loaded: h + 1,
total: r.length,
percentage: Math.round((h + 1) / r.length * 100),
message: `Processing commit ${h + 1}/${r.length}`
});
const y = await this.fetchGitHub(
`/repos/${this.owner}/${this.repo}/commits/${u.sha}`
);
if (y.files && y.files.length > 0) {
const g = y.files.map((S) => ({
filename: S.filename,
status: S.status,
additions: S.additions,
deletions: S.deletions,
changes: S.changes,
previous_filename: S.previous_filename
}));
c.updateFromPRFiles(g);
}
const p = P(
y.sha.substring(0, 7),
y.commit.message.split(`
`)[0],
y.commit.author.name,
y.commit.author.date,
c
);
d.push(p), t && t(p), h < r.length - 1 && await this.sleep(this.requestDelay);
}
return { commits: d };
}
/**
* Build commit timeline from PRs (non-incremental version for backward compatibility)
*/
async buildTimelineFromPRs(t) {
return (await this.buildTimelineFromPRsIncremental(
void 0,
t
)).commits;
}
}
const pt = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
__proto__: null,
GitHubApiService: H
}, Symbol.toStringTag, { value: "Module" })), re = 1, U = "github-timeline:", gt = 24 * 60 * 60 * 1e3;
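/**
 * localStorage cache (minified as `L`): persists commit timelines under
 * "github-timeline:"-prefixed keys with a schema version and a 24-hour TTL,
 * revives ISO date strings into Date objects on load, and evicts the oldest
 * entry when the storage quota is exceeded.
 */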
class L {
static getStorageKey(t) {
return `${U}${t}`;
}
/**
* Save commit data to localStorage
*/
static saveCommits(t, e) {
try {
const s = {
repoKey: t,
commits: e,
lastUpdated: Date.now(),
version: re
}, a = JSON.stringify(s, (r, i) => i instanceof Date ? i.toISOString() : i);
return localStorage.setItem(this.getStorageKey(t), a), !0;
} catch (s) {
return console.error("Failed to save to localStorage:", s), s instanceof Error && s.name === "QuotaExceededError" && this.clearOldestCache(), !1;
}
}
/**
* Load commit data from localStorage
*/
static loadCommits(t) {
try {
const e = localStorage.getItem(this.getStorageKey(t));
if (!e)
return null;
const s = JSON.parse(e, (a, r) => typeof r == "string" && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(r) ? new Date(r) : r);
return s.version !== re ? (this.clearCache(t), null) : Date.now() - s.lastUpdated > gt ? (this.clearCache(t), null) : s.commits;
} catch (e) {
return console.error("Failed to load from localStorage:", e), null;
}
}
/**
* Clear cache for a specific repo
*/
static clearCache(t) {
try {
localStorage.removeItem(this.getStorageKey(t));
} catch (e) {
console.error("Failed to clear cache:", e);
}
}
/**
* Clear all repo timeline caches
*/
static clearAllCaches() {
try {
const t = Object.keys(localStorage);
for (const e of t)
e.startsWith(U) && localStorage.removeItem(e);
} catch (t) {
console.error("Failed to clear all caches:", t);
}
}
/**
* Get cache metadata
*/
static getCacheInfo(t) {
try {
const e = localStorage.getItem(this.getStorageKey(t));
if (!e)
return { exists: !1 };
const s = JSON.parse(e);
return {
exists: !0,
age: Date.now() - s.lastUpdated,
commitCount: s.commits.length
};
} catch {
return { exists: !1 };
}
}
/**
* Clear oldest cache when quota is exceeded
*/
static clearOldestCache() {
try {
const t = Object.keys(localStorage), e = [];
for (const s of t)
if (s.startsWith(U)) {
const a = JSON.parse(localStorage.getItem(s) || "{}");
a.lastUpdated && e.push({ key: s, timestamp: a.lastUpdated });
}
e.sort((s, a) => s.timestamp - a.timestamp), e.length > 0 && localStorage.removeItem(e[0].key);
} catch (t) {
console.error("Failed to clear oldest cache:", t);
}
}
/**
* Get storage usage statistics
*/
static getStorageStats() {
try {
const t = Object.keys(localStorage);
let e = 0, s = 0;
for (const a of t)
if (a.startsWith(U)) {
s++;
const r = localStorage.getItem(a);
r && (e += a.length + r.length);
}
return {
totalCaches: s,
estimatedSize: e * 2
// Rough estimate in bytes (UTF-16)
};
} catch {
return { totalCaches: 0, estimatedSize: 0 };
}
}
}
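/**
 * Repository data service (minified as `ae`): orchestrates GitHubApiService
 * and the localStorage cache. Loads cache-first, fetches incrementally with
 * progress callbacks, pages in more commits via loadMoreCommits, and
 * annotates per-file size changes (added/moved/deleted/unchanged) between
 * consecutive commits.
 */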
class ae {
constructor(t, e, s) {
T(this, "repoPath");
T(this, "token");
T(this, "workerUrl");
T(this, "githubService");
this.repoPath = t, this.token = e, this.workerUrl = s;
}
getRateLimitInfo() {
var t;
return ((t = this.githubService) == null ? void 0 : t.getRateLimitInfo()) || null;
}
/**
* Load more commits with pagination
*/
async loadMoreCommits(t, e = 40, s = /* @__PURE__ */ new Map(), a, r) {
this.githubService || (this.githubService = new H(
this.repoPath,
this.token,
this.workerUrl
));
const i = await this.githubService.loadMoreCommits(
t,
e,
s,
a,
r
), n = this.calculateSizeChanges(i.commits), m = this.getCacheKey(), c = [...L.loadCommits(m) || [], ...n];
return L.saveCommits(m, c), {
...i,
commits: n
};
}
/**
* Get repository status from worker (GitHub state + cache state)
*/
// Status methods removed - use new /cache and /summary endpoints directly
// via GitHubApiService.fetchCacheStatus() and GitHubApiService.fetchRepoSummary()
/**
* Fetch metadata for all PRs (fast, no files)
*/
async getMetadata() {
this.githubService = new H(
this.repoPath,
this.token,
this.workerUrl
);
const e = (await this.githubService.fetchMetadata()).map((r, i) => ({
number: i + 1,
// Use index as commit number since commits don't have PR numbers
title: r.message,
author: r.author,
date: new Date(r.date)
})), s = e.map((r) => r.date.getTime()), a = {
start: Math.min(...s),
end: Math.max(...s)
};
return { prs: e, timeRange: a };
}
/**
* Get a cache key for this repository
*/
getCacheKey() {
return this.repoPath.toLowerCase().replace(/[^a-z0-9]/g, "-");
}
/**
* Get commit history with caching and incremental loading
*/
async getCommitHistory(t, e = !1, s) {
const a = this.getCacheKey();
if (!e) {
const r = L.loadCommits(a);
if (r)
return { commits: r };
}
try {
const r = await this.fetchCommitsWithProgress(t, s);
return L.saveCommits(a, r.commits), r;
} catch (r) {
console.error("Error fetching commits:", r);
const i = L.loadCommits(a);
if (i)
return { commits: i };
throw r;
}
}
/**
* Fetch commits with progress reporting
*/
async fetchCommitsWithProgress(t, e) {
if (/^[^/]+\/[^/]+$/.test(this.repoPath))
try {
this.githubService = new H(
this.repoPath,
this.token,
this.workerUrl
);
const s = this.getCacheKey(), a = await this.githubService.buildTimelineFromPRsIncremental(
e ? (r) => {
const i = this.calculateSizeChanges([r]);
e(i[0]);
} : void 0,
t,
(r) => {
const i = this.calculateSizeChanges(r);
L.saveCommits(s, i);
}
);
return console.log("[AUTOLOAD] GitService initial load result:", {
commits: a.commits.length,
hasMore: a.hasMore,
totalCount: a.totalCount
}), {
commits: this.calculateSizeChanges(a.commits),
hasMore: a.hasMore,
totalCount: a.totalCount
};
} catch (s) {
throw console.error("GitHub API error:", s), s;
}
try {
const s = await fetch(
`/api/commits?path=${encodeURIComponent(this.repoPath)}`
);
if (!s.ok)
throw new Error("Failed to fetch commits");
const a = await s.json();
return { commits: this.parseCommitsWithProgress(a, t) };
} catch (s) {
throw new Error(
`Failed to fetch repository data. Please check the repository path and try again. ${s instanceof Error ? s.message : ""}`
);
}
}
/**
* Parse commits with progress reporting
*/
parseCommitsWithProgress(t, e) {
const s = [];
for (let a = 0; a < t.length; a++) {
const r = t[a];
s.push({
hash: r.hash,
message: r.message,
author: r.author,
date: new Date(r.date),
files: B(
r.files.map((i) => ({
path: i.path,
size: i.size || 100,
type: i.type
}))
),
edges: K(
r.files.map((i) => ({ path: i.path, size: i.size || 100 }))
)
}), e && a % 10 === 0 && e({
loaded: a + 1,
total: t.length,
percentage: Math.round((a + 1) / t.length * 100)
});
}
return this.calculateSizeChanges(s);
}
/**
* Clear cache for this repository
*/
clearCache() {
L.clearCache(this.getCacheKey());
}
/**
* Get cache information
*/
getCacheInfo() {
return L.getCacheInfo(this.getCacheKey());
}
calculateSizeChanges(t) {
for (let e = 0; e < t.length; e++)
if (e === 0)
t[e].files.forEach((s) => {
s.sizeChange = "unchanged", s.fileStatus = "added";
});
else {
const s = t[e - 1], a = new Map(
s.files.map((n) => [n.path, n])
), r = new Map(
t[e].files.map((n) => [n.path, n])
);
t[e].files.forEach((n) => {
const m = a.get(n.path);
if (m)
n.previousSize = m.size, n.fileStatus = "unchanged", n.size > m.size ? n.sizeChange = "increase" : n.size < m.size ? n.sizeChange = "decrease" : n.sizeChange = "unchanged";
else {
const d = n.path.split("/").pop();
let c = !1;
for (const [h, u] of a)
if (h.split("/").pop() === d && !r.has(h) && n.size === u.size) {
n.fileStatus = "moved", n.previousPath = h, n.previousSize = u.size, n.sizeChange = "unchanged", c = !0;
break;
}
c || (n.fileStatus = "added", n.sizeChange = "increase");
}
});
const i = [];
for (const [n, m] of a)
if (!r.has(n)) {
let d = !1;
for (const c of t[e].files)
if (c.previousPath === n) {
d = !0;
break;
}
if (!d) {
const c = {
...m,
size: 0,
previousSize: m.size,
fileStatus: "deleted",
sizeChange: "decrease"
};
t[e].files.push(c), i.push(c);
}
}
if (i.length > 0) {
const n = t[e].files.map((c) => ({
path: c.path,
size: c.size
})), m = B(n), d = new Map(
t[e].files.map((c) => [c.path, c])
);
t[e].files = m.map((c) => {
const h = d.get(c.path);
return h || c;
}), t[e].edges = K(n);
}
}
return t;
}
}
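/**
 * Timeline state reducer (minified as `ft`): SET_COMMITS, ADD_COMMIT, and
 * APPEND_COMMITS recompute the time range from commit dates; the remaining
 * actions update playback time, loading and progress flags, error, cache and
 * rate-limit status, and pagination state.
 */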
function ft(o, t) {
switch (t.type) {
case "SET_COMMITS": {
if (t.commits.length > 0) {
const e = t.commits.map((r) => r.date.getTime()), s = {
start: Math.min(...e),
end: Math.max(...e)
}, a = s.start;
return {
...o,
commits: t.commits,
timeRange: s,
currentTime: a
};
}
return { ...o, commits: t.commits };
}
case "ADD_COMMIT": {
const e = [...o.commits, t.commit], s = t.commit.date.getTime(), a = {
start: o.commits.length === 0 ? s : Math.min(s, o.timeRange.start),
end: o.commits.length === 0 ? s : Math.max(s, o.timeRange.end)
}, r = o.currentTime === 0 ? s : o.currentTime;
return {
...o,
commits: e,
timeRange: a,
currentTime: r
};
}
case "SET_CURRENT_TIME":
return { ...o, currentTime: t.time };
case "SET_TIME_RANGE":
return { ...o, timeRange: t.range };
case "SET_TOTAL_PRS":
return { ...o, totalPRs: t.count };
case "SET_LOADING":
return { ...o, loading: t.loading };
case "SET_BACKGROUND_LOADING":
return { ...o, backgroundLoading: t.loading };
case "SET_LOAD_PROGRESS":
return { ...o, loadProgress: t.progress };
case "SET_ERROR":
return { ...o, error: t.error };
case "SET_RATE_LIMIT":
return { ...o, rateLimit: t.rateLimit };
case "SET_FROM_CACHE":
return { ...o, fromCache: t.fromCache };
case "SET_RATE_LIMITED_CACHE":
return { ...o, rateLimitedCache: t.rateLimitedCache };
case "SET_REPO_STATUS":
return { ...o, repoStatus: t.status };
case "SET_CACHE_STATUS":
return { ...o, cacheStatus: t.status };
case "SET_REPO_SUMMARY":
return { ...o, repoSummary: t.summary };
case "SET_LOADING_STAGE":
return { ...o, loadingStage: t.stage };
case "RESET_COMMITS":
return { ...o, commits: [] };
case "SET_PAGINATION":
return {
...o,
hasMoreCommits: t.hasMore,
totalCommitsAvailable: t.totalAvailable
};
case "APPEND_COMMITS": {
const e = [...o.commits, ...t.commits], s = t.commits.map((r) => r.date.getTime()), a = {
start: Math.min(o.timeRange.start, ...s),
end: Math.max(o.timeRange.end, ...s)
};
return {
...o,
commits: e,
timeRange: a
};
}
default:
return o;
}
}
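/**
 * Repo data hook (minified as `yt`): a staged loading pipeline. Stage 1
 * fetches cache status and repo summary for instant feedback; stage 2 loads
 * lightweight metadata to establish the time range; commits then load
 * cache-first with incremental rendering, and loadMore() pages in further
 * commits, seeding the file tracker from the last commit's file sizes.
 */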
function yt({
repoPath: o,
githubToken: t,
workerUrl: e,
testMode: s = !1,
onError: a
}) {
const [r, i] = Ce(ft, {
commits: [],
currentTime: 0,
timeRange: { start: 0, end: Date.now() },
totalPRs: 0,
loading: !s,
backgroundLoading: !1,
loadProgress: null,
error: null,
rateLimit: null,
fromCache: !1,
rateLimitedCache: !1,
repoStatus: null,
cacheStatus: null,
repoSummary: null,
loadingStage: "initial",
hasMoreCommits: !1,
totalCommitsAvailable: 0
}), n = _(null);
A(() => {
if (!e || s) return;
(async () => {
i({ type: "SET_LOADING_STAGE", stage: "cache-check" });
try {
const { GitHubApiService: h } = await Promise.resolve().then(() => pt), u = new h(
o,
t,
e
), [y, p] = await Promise.all([
u.fetchCacheStatus(),
u.fetchRepoSummary()
]);
i({ type: "SET_CACHE_STATUS", status: y.cache }), i({ type: "SET_REPO_SUMMARY", summary: p.github });
const g = {
cache: y.cache,
github: p.github,
recommendation: y.status
};
i({ type: "SET_REPO_STATUS", status: g });
} catch (h) {
console.error("[Stage 1] Error loading instant feedback:", h);
}
})();
}, [o, e, s, t]), A(() => {
(async () => {
i({ type: "SET_LOADING_STAGE", stage: "metadata" });
try {
const u = await new ae(o, void 0, e).getMetadata();
i({ type: "SET_TOTAL_PRS", count: u.prs.length }), i({ type: "SET_TIME_RANGE", range: u.timeRange }), i({ type: "SET_CURRENT_TIME", time: u.timeRange.start });
} catch (h) {
console.error("[Stage 2] Error loading metadata:", h);
}
})();
}, [o, e]);
const m = D(
async (c = !1) => {
const h = new ae(
o,
void 0,
// No token needed - using worker
e
);
n.current = h;
const u = h.getCacheInfo();
if (u.exists && !c) {
i({ type: "SET_LOADING", loading: !0 }), i({ type: "SET_LOAD_PROGRESS", progress: null });
try {
const p = await h.getCommitHistory((g) => {
i({ type: "SET_LOAD_PROGRESS", progress: g });
}, c);
console.log("[AUTOLOAD] useRepoData received initial result:", {
commits: p.commits.length,
hasMore: p.hasMore,
totalCount: p.totalCount
}), i({ type: "SET_COMMITS", commits: p.commits }), p.hasMore !== void 0 && p.totalCount !== void 0 && i({
type: "SET_PAGINATION",
hasMore: p.hasMore,
totalAvailable: p.totalCount
}), i({ type: "SET_FROM_CACHE", fromCache: !0 }), i({ type: "SET_RATE_LIMITED_CACHE", rateLimitedCache: !1 }), i({ type: "SET_LOADING", loading: !1 }), i({ type: "SET_ERROR", error: null }), i({ type: "SET_LOADING_STAGE", stage: "complete" }), i({
type: "SET_RATE_LIMIT",
rateLimit: h.getRateLimitInfo()
});
} catch (p) {
console.error("Error loading commits:", p);
const g = p instanceof Error ? p : new Error("Failed to load repository");
i({ type: "SET_ERROR", error: g.message }), i({ type: "SET_LOADING", loading: !1 }), i({ type: "SET_RATE_LIMITED_CACHE", rateLimitedCache: !1 }), i({
type: "SET_RATE_LIMIT",
rateLimit: h.getRateLimitInfo()
}), a && a(g);
}
} else {
i({ type: "SET_LOADING_STAGE", stage: "incremental" }), i({ type: "SET_LOADING", loading: !1 }), i({ type: "SET_BACKGROUND_LOADING", loading: !0 }), i({ type: "SET_LOAD_PROGRESS", progress: null }), i({ type: "RESET_COMMITS" }), i({ type: "SET_FROM_CACHE", fromCache: !1 });
try {
await h.getCommitHistory(
(p) => {
i({ type: "SET_LOAD_PROGRESS", progress: p });
},
c,
(p) => {
i({ type: "ADD_COMMIT", commit: p });
}
);
} catch (p) {
if (console.error("Error loading commits:", p), u.exists)
try {
const g = await h.getCommitHistory(
void 0,
// no progress updates needed
!1
// don't force refresh
);
i({ type: "SET_COMMITS", commits: g.commits }), i({ type: "SET_FROM_CACHE", fromCache: !0 }), i({
type: "SET_RATE_LIMITED_CACHE",
rateLimitedCache: !0
}), i({ type: "SET_ERROR", error: null }), console.warn(
"Using cached data due to API error:",
p instanceof Error ? p.message : "Unknown error"
);
} catch {
i({
type: "SET_ERROR",
error: p instanceof Error ? p.message : "Failed to load repository"
}), i({
type: "SET_RATE_LIMITED_CACHE",
rateLimitedCache: !1
});
}
else
i({
type: "SET_ERROR",
error: p instanceof Error ? p.message : "Failed to load repository"
}), i({
type: "SET_RATE_LIMITED_CACHE",
rateLimitedCache: !1
});
i({
type: "SET_RATE_LIMIT",
rateLimit: h.getRateLimitInfo()
});
} finally {
i({ type: "SET_BACKGROUND_LOADING", loading: !1 }), i({ type: "SET_LOAD_PROGRESS", progress: null }), i({ type: "SET_LOADING_STAGE", stage: "complete" });
}
}
},
[o, e, a]
);
A(() => {
m();
}, [m]);
const d = D(async () => {
if (console.log("[AUTOLOAD] loadMore() called", {
hasGitService: !!n.current,
backgroundLoading: r.backgroundLoading,
commitsLength: r.commits.length
}), !n.current || r.backgroundLoading) {
console.log(
"[AUTOLOAD] loadMore() skipped - no service or already loading"
);
return;
}
console.log("[AUTOLOAD] Starting background load..."), i({ type: "SET_BACKGROUND_LOADING", loading: !0 });
try {
const c = /* @__PURE__ */ new Map();
if (r.commits.length > 0) {
const u = r.commits[r.commits.length - 1];
for (const y of u.files)
c.set(y.path, y.size);
}
console.log(
"[AUTOLOAD] Calling gitService.loadMoreCommits with offset:",
r.commits.length
);
const h = await n.current.loadMoreCommits(
r.commits.length,
40,
c,
(u) => {
console.log("[AUTOLOAD] Received commit:", u.hash), i({ type: "APPEND_COMMITS", commits: [u] });
},
(u) => {
i({ type: "SET_LOAD_PROGRESS", progress: u });
}
);
console.log("[AUTOLOAD] Load complete:", {
newCommits: h.commits.length,
hasMore: h.hasMore,
totalCount: h.totalCount
}), i({
type: "SET_PAGINATION",
hasMore: h.hasMore,
totalAvailable: h.totalCount
}), i({ type: "SET_BACKGROUND_LOADING", loading: !1 }), i({ type: "SET_LOAD_PROGRESS", progress: null });
} catch (c) {
console.error("[AUTOLOAD] Error loading more commits:", c), i({ type: "SET_BACKGROUND_LOADING", loading: !1 }), i({ type: "SET_LOAD_PROGRESS", progress: null });
}
}, [r.commits, r.backgroundLoading]);
return {
...r,
loadCommits: m,
loadMore: d,
setCurrentTime: (c) => {
i(typeof c == "function" ? {
type: "SET_CURRENT_TIME",
time: c(r.currentTime)
} : { type: "SET_CURRENT_TIME", time: c });
}
};
}
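/**
 * Minified as `ce`: returns the index of the latest commit dated at or
 * before the given time, scanning from the end; 0 when nothing matches or
 * the list is empty.
 */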
function ce(o, t) {
if (o.length === 0) return 0;
for (let e = o.length - 1; e >= 0; e--)
if (o[e].date.getTime() <= t)
return e;
return 0;
}
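/** Empty state (minified as `St`): shown when no commits load for a repo. */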
function St({ repoPath: o }) {
return /* @__PURE__ */ l("div", { className: "w-full h-full flex items-center justify-center bg-slate-900 text-white", children: /* @__PURE__ */ f("div", { className: "text-center", children: [
/* @__PURE__ */ l("div", { className: "text-xl mb-2", children: "No commits found" }),
/* @__PURE__ */ f("div", { className: "text-gray-400", children: [
"Unable to load repository data for: ",
o
] })
] }) });
}
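/**
 * Rate-limit badge (minified as `le`): shows remaining/limit with a green,
 * yellow (<20% left), or red (<5% left) icon, plus the reset time once more
 * than half the quota has been used.
 */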
function le({
remaining: o,
limit: t,
resetTime: e
}) {
if (o === null || t === null)
return null;
const s = o / t * 100, a = s < 20, r = s < 5;
return /* @__PURE__ */ f("div", { className: "flex items-center gap-2 text-xs", children: [
r ? /* @__PURE__ */ l(se, { size: 14, className: "text-red-400" }) : a ? /* @__PURE__ */ l(se, { size: 14, className: "text-yellow-400" }) : /* @__PURE__ */ l(We, { size: 14, className: "text-green-400" }),
/* @__PURE__ */ f(
"div",
{
className: r ? "text-red-400" : a ? "text-yellow-400" : "text-gray-400",
children: [
"API: ",
o,
"/",
t,
e && o < t / 2 && /* @__PURE__ */ f("span", { className: "ml-1", children: [
"(resets ",
e.toLocaleTimeString(),
")"
] })
]
}
)
] });
}
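/**
 * Error screen (minified as `vt`): full-screen error view with back and
 * retry actions; detects rate-limit failures by matching "rate limit" in
 * the error message.
 */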
function vt({
error: o,
repoPath: t,
rateLimit: e,
onBack: s,
onRetry: a
}) {
const r = o.includes("rate limit");
return /* @__PURE__ */ l("div", { className: "w-full h-full flex items-cent