diff --git a/.changeset/blue-points-dream.md b/.changeset/blue-points-dream.md
new file mode 100644
index 0000000000000..eacb88108a0f7
--- /dev/null
+++ b/.changeset/blue-points-dream.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Security Hotfix (https://docs.rocket.chat/docs/security-fixes-and-updates)
diff --git a/.changeset/blue-seals-leave.md b/.changeset/blue-seals-leave.md
new file mode 100644
index 0000000000000..62f6e7e9003fd
--- /dev/null
+++ b/.changeset/blue-seals-leave.md
@@ -0,0 +1,7 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/model-typings": patch
+"@rocket.chat/models": patch
+---
+
+Fixes an authorization issue that allowed users to confirm uploads from other users
diff --git a/.changeset/bright-dots-march.md b/.changeset/bright-dots-march.md
new file mode 100644
index 0000000000000..771343eea43f4
--- /dev/null
+++ b/.changeset/bright-dots-march.md
@@ -0,0 +1,7 @@
+---
+'@rocket.chat/meteor': patch
+'@rocket.chat/model-typings': patch
+'@rocket.chat/models': patch
+---
+
+Fixes main channel scroll position changing when jumping to a thread message from search
diff --git a/.changeset/clean-ears-fly.md b/.changeset/clean-ears-fly.md
new file mode 100644
index 0000000000000..781a8a4da4466
--- /dev/null
+++ b/.changeset/clean-ears-fly.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes a cross-resource access issue that allowed users to retrieve emojis from the Custom Sounds endpoint and sounds from the Custom Emojis endpoint when using the FileSystem storage mode.
diff --git a/.changeset/fix-blockquote-empty-lines.md b/.changeset/fix-blockquote-empty-lines.md
new file mode 100644
index 0000000000000..b3b463f3913a6
--- /dev/null
+++ b/.changeset/fix-blockquote-empty-lines.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/message-parser': patch
+---
+
+Fixed blockquotes with empty lines between paragraphs not rendering as a single blockquote. Lines like `> ` or `>` (empty quote lines) are now treated as part of the surrounding blockquote rather than breaking it into separate quotes.
diff --git a/.changeset/fix-message-parser-reduce-perf.md b/.changeset/fix-message-parser-reduce-perf.md
new file mode 100644
index 0000000000000..6601f8f205c43
--- /dev/null
+++ b/.changeset/fix-message-parser-reduce-perf.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/message-parser': patch
+---
+
+Replaces wasteful `filter().shift()` with `find(Boolean)` in `extractFirstResult` to avoid allocating an intermediate filtered array just to get the first truthy element.
diff --git a/.changeset/fix-register-workspace-i18n.md b/.changeset/fix-register-workspace-i18n.md
new file mode 100644
index 0000000000000..62eed988444d9
--- /dev/null
+++ b/.changeset/fix-register-workspace-i18n.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes wrong i18n key in RegisterWorkspace confirmation step so the text is translated instead of showing a missing key.
diff --git a/.changeset/fix-trailing-punctuation-url.md b/.changeset/fix-trailing-punctuation-url.md
new file mode 100644
index 0000000000000..f55255a46e574
--- /dev/null
+++ b/.changeset/fix-trailing-punctuation-url.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/message-parser": patch
+---
+
+Fixes trailing punctuation (e.g. periods, exclamation marks) being incorrectly included in parsed URLs when they appear at the end of a message. For example, `go to https://www.google.com.` now correctly parses the URL as `https://www.google.com` without the trailing period.
diff --git a/.changeset/fix-webhook-newline.md b/.changeset/fix-webhook-newline.md
new file mode 100644
index 0000000000000..c622c57eb235b
--- /dev/null
+++ b/.changeset/fix-webhook-newline.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes incoming webhook messages ignoring literal `\n` escape sequences, and fixes the `MarkdownText` `document` variant not rendering newlines as line breaks.
diff --git a/.changeset/healthy-dragons-crash.md b/.changeset/healthy-dragons-crash.md
new file mode 100644
index 0000000000000..f08337723f544
--- /dev/null
+++ b/.changeset/healthy-dragons-crash.md
@@ -0,0 +1,9 @@
+---
+'@rocket.chat/fuselage-ui-kit': minor
+'@rocket.chat/ui-kit': major
+'@rocket.chat/apps-engine': minor
+'@rocket.chat/livechat': minor
+'@rocket.chat/meteor': minor
+---
+
+refactor(ui-kit): Remove UiKit deprecations
diff --git a/.changeset/honest-shrimps-cough.md b/.changeset/honest-shrimps-cough.md
new file mode 100644
index 0000000000000..c7711d4a8cc73
--- /dev/null
+++ b/.changeset/honest-shrimps-cough.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/meteor": patch
+---
+
+Fixes inconsistent username formatting causing '@@username' for federated users
diff --git a/.changeset/hungry-monkeys-hang.md b/.changeset/hungry-monkeys-hang.md
new file mode 100644
index 0000000000000..f128167d7cee7
--- /dev/null
+++ b/.changeset/hungry-monkeys-hang.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": minor
+"@rocket.chat/rest-typings": minor
+---
+
+Add OpenAPI support for the Rocket.Chat autotranslate translateMessage API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation
diff --git a/.changeset/little-eyes-kneel.md b/.changeset/little-eyes-kneel.md
new file mode 100644
index 0000000000000..0ae80efde92b3
--- /dev/null
+++ b/.changeset/little-eyes-kneel.md
@@ -0,0 +1,76 @@
+---
+'@rocket.chat/eslint-config': minor
+'@rocket.chat/server-cloud-communication': patch
+'@rocket.chat/omnichannel-services': patch
+'@rocket.chat/omnichannel-transcript': patch
+'@rocket.chat/authorization-service': patch
+'@rocket.chat/federation-matrix': patch
+'@rocket.chat/web-ui-registration': patch
+'@rocket.chat/network-broker': patch
+'@rocket.chat/password-policies': patch
+'@rocket.chat/release-changelog': patch
+'@rocket.chat/storybook-config': patch
+'@rocket.chat/presence-service': patch
+'@rocket.chat/omni-core-ee': patch
+'@rocket.chat/fuselage-ui-kit': patch
+'@rocket.chat/instance-status': patch
+'@rocket.chat/media-signaling': patch
+'@rocket.chat/patch-injection': patch
+'@rocket.chat/account-service': patch
+'@rocket.chat/media-calls': patch
+'@rocket.chat/message-parser': patch
+'@rocket.chat/mock-providers': patch
+'@rocket.chat/release-action': patch
+'@rocket.chat/pdf-worker': patch
+'@rocket.chat/ui-theming': patch
+'@rocket.chat/account-utils': patch
+'@rocket.chat/core-services': patch
+'@rocket.chat/message-types': patch
+'@rocket.chat/model-typings': patch
+'@rocket.chat/mongo-adapter': patch
+'@rocket.chat/ui-video-conf': patch
+'@rocket.chat/uikit-playground': patch
+'@rocket.chat/cas-validate': patch
+'@rocket.chat/core-typings': patch
+'@rocket.chat/jest-presets': patch
+'@rocket.chat/peggy-loader': patch
+'@rocket.chat/rest-typings': patch
+'@rocket.chat/server-fetch': patch
+'@rocket.chat/ddp-streamer': patch
+'@rocket.chat/queue-worker': patch
+'@rocket.chat/presence': patch
+'@rocket.chat/apps-engine': patch
+'@rocket.chat/desktop-api': patch
+'@rocket.chat/http-router': patch
+'@rocket.chat/poplib': patch
+'@rocket.chat/ui-composer': patch
+'@rocket.chat/ui-contexts': patch
+'@rocket.chat/license': patch
+'@rocket.chat/api-client': patch
+'@rocket.chat/ddp-client': patch
+'@rocket.chat/log-format': patch
+'@rocket.chat/gazzodown': patch
+'@rocket.chat/omni-core': patch
+'@rocket.chat/ui-avatar': patch
+'@rocket.chat/ui-client': patch
+'@rocket.chat/livechat': patch
+'@rocket.chat/abac': patch
+'@rocket.chat/favicon': patch
+'@rocket.chat/tracing': patch
+'@rocket.chat/ui-voip': patch
+'@rocket.chat/agenda': patch
+'@rocket.chat/base64': patch
+'@rocket.chat/logger': patch
+'@rocket.chat/models': patch
+'@rocket.chat/random': patch
+'@rocket.chat/sha256': patch
+'@rocket.chat/ui-kit': patch
+'@rocket.chat/tools': patch
+'@rocket.chat/apps': patch
+'@rocket.chat/cron': patch
+'@rocket.chat/i18n': patch
+'@rocket.chat/jwt': patch
+'@rocket.chat/meteor': patch
+---
+
+chore(eslint): Upgrades ESLint and its configuration
diff --git a/.changeset/loud-weeks-protect.md b/.changeset/loud-weeks-protect.md
new file mode 100644
index 0000000000000..3317177f72765
--- /dev/null
+++ b/.changeset/loud-weeks-protect.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/message-parser': patch
+---
+
+Fixes ordered list AST generation to preserve `number: 0` for list items that start at index `0`.
diff --git a/.changeset/migrate-chat-follow-unfollow-message.md b/.changeset/migrate-chat-follow-unfollow-message.md
new file mode 100644
index 0000000000000..875c2ed9d1443
--- /dev/null
+++ b/.changeset/migrate-chat-follow-unfollow-message.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": minor
+"@rocket.chat/rest-typings": minor
+---
+
+Add OpenAPI support for the chat.followMessage and chat.unfollowMessage API endpoints by migrating to a modern chained route definition syntax and utilizing AJV schemas for body and response validation.
diff --git a/.changeset/migrate-chat-star-unstar-message.md b/.changeset/migrate-chat-star-unstar-message.md
new file mode 100644
index 0000000000000..395b6422747a5
--- /dev/null
+++ b/.changeset/migrate-chat-star-unstar-message.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": minor
+"@rocket.chat/rest-typings": minor
+---
+
+Add OpenAPI support for the chat.starMessage and chat.unStarMessage API endpoints by migrating to a modern chained route definition syntax and utilizing AJV schemas for body and response validation.
diff --git a/.changeset/migrate-rooms-leave-endpoint.md b/.changeset/migrate-rooms-leave-endpoint.md
new file mode 100644
index 0000000000000..4f9a6263a9a19
--- /dev/null
+++ b/.changeset/migrate-rooms-leave-endpoint.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/meteor': minor
+'@rocket.chat/rest-typings': minor
+---
+
+Migrated rooms.leave endpoint to new OpenAPI pattern with AJV validation
diff --git a/.changeset/nasty-candles-invent.md b/.changeset/nasty-candles-invent.md
new file mode 100644
index 0000000000000..2af4dcf9cebf1
--- /dev/null
+++ b/.changeset/nasty-candles-invent.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/web-ui-registration': patch
+'@rocket.chat/i18n': patch
+---
+
+Fixes invalid email domain error not being displayed on the registration form.
diff --git a/.changeset/nice-penguins-rhyme.md b/.changeset/nice-penguins-rhyme.md
new file mode 100644
index 0000000000000..5e89a31ef9739
--- /dev/null
+++ b/.changeset/nice-penguins-rhyme.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes messages being marked as sent before the request finishes
diff --git a/.changeset/nice-squids-smoke.md b/.changeset/nice-squids-smoke.md
new file mode 100644
index 0000000000000..a71f10d915f97
--- /dev/null
+++ b/.changeset/nice-squids-smoke.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat e2e.getUsersOfRoomWithoutKey endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/nine-otters-hug.md b/.changeset/nine-otters-hug.md
new file mode 100644
index 0000000000000..78868e3057732
--- /dev/null
+++ b/.changeset/nine-otters-hug.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/rest-typings': minor
+'@rocket.chat/meteor': minor
+---
+
+Migrated rooms.delete endpoint to new OpenAPI pattern with AJV validation
diff --git a/.changeset/olive-hairs-report.md b/.changeset/olive-hairs-report.md
new file mode 100644
index 0000000000000..fff0535e67cbc
--- /dev/null
+++ b/.changeset/olive-hairs-report.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes version update banner showing outdated versions after server upgrade.
diff --git a/.changeset/orange-paws-poke.md b/.changeset/orange-paws-poke.md
new file mode 100644
index 0000000000000..2b49cea8f1442
--- /dev/null
+++ b/.changeset/orange-paws-poke.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/i18n': patch
+'@rocket.chat/meteor': patch
+---
+
+Deprecates `Anonymous write`. This feature will be removed in version 9.0.0.
diff --git a/.changeset/polite-plums-boil.md b/.changeset/polite-plums-boil.md
new file mode 100644
index 0000000000000..b9263d979581b
--- /dev/null
+++ b/.changeset/polite-plums-boil.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes the intermittent behavior where the "New messages" indicator appears incorrectly after the user sends a message
diff --git a/.changeset/pretty-jobs-juggle.md b/.changeset/pretty-jobs-juggle.md
new file mode 100644
index 0000000000000..028fc592dd034
--- /dev/null
+++ b/.changeset/pretty-jobs-juggle.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/rest-typings': minor
+'@rocket.chat/meteor': minor
+---
+
+Adds OpenAPI support for the Rocket.Chat e2e.updateGroupKey endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/rare-waves-help.md b/.changeset/rare-waves-help.md
new file mode 100644
index 0000000000000..476f7e0839153
--- /dev/null
+++ b/.changeset/rare-waves-help.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": minor
+"@rocket.chat/rest-typings": minor
+---
+
+Add OpenAPI support for the Rocket.Chat users.getAvatarSuggestion API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/red-windows-breathe.md b/.changeset/red-windows-breathe.md
new file mode 100644
index 0000000000000..a177574edea6b
--- /dev/null
+++ b/.changeset/red-windows-breathe.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/meteor": patch
+---
+
+Fixes calendar events modifying the wrong status property when attempting to sync `busy` status.
diff --git a/.changeset/refactor-instances-api-chained-pattern.md b/.changeset/refactor-instances-api-chained-pattern.md
new file mode 100644
index 0000000000000..e38ef1235e7ef
--- /dev/null
+++ b/.changeset/refactor-instances-api-chained-pattern.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': minor
+---
+
+Migrates the `instances.get` API endpoint to the new chained pattern with response schemas
diff --git a/.changeset/refactor-ldap-api-chained-pattern.md b/.changeset/refactor-ldap-api-chained-pattern.md
new file mode 100644
index 0000000000000..e402e8609cb46
--- /dev/null
+++ b/.changeset/refactor-ldap-api-chained-pattern.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': minor
+---
+
+Migrates `ldap.testConnection` and `ldap.testSearch` REST API endpoints from legacy `addRoute` pattern to the new chained `.post()` API pattern with typed response schemas and AJV body validation (replacing Meteor `check()`).
diff --git a/.changeset/refactor-presence-api-chained-pattern.md b/.changeset/refactor-presence-api-chained-pattern.md
new file mode 100644
index 0000000000000..cec1816fce98b
--- /dev/null
+++ b/.changeset/refactor-presence-api-chained-pattern.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': minor
+---
+
+Migrates `presence.getConnections` and `presence.enableBroadcast` REST API endpoints from legacy `addRoute` pattern to the new chained `.get()`/`.post()` API pattern with typed response schemas.
diff --git a/.changeset/rude-plums-think.md b/.changeset/rude-plums-think.md
new file mode 100644
index 0000000000000..6b5804f013757
--- /dev/null
+++ b/.changeset/rude-plums-think.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': minor
+---
+
+Fixes Custom Sounds Contextualbar state and refresh behavior
diff --git a/.changeset/shaggy-cars-watch.md b/.changeset/shaggy-cars-watch.md
new file mode 100644
index 0000000000000..56db9fd9b4e74
--- /dev/null
+++ b/.changeset/shaggy-cars-watch.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/ui-contexts": patch
+---
+
+Fixes a mismatch in the room icons on the sidebar items, ABAC Managed rooms were not displaying the correct icon
diff --git a/.changeset/shiny-pears-admire.md b/.changeset/shiny-pears-admire.md
new file mode 100644
index 0000000000000..0e8287d708f4e
--- /dev/null
+++ b/.changeset/shiny-pears-admire.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Limits `Outgoing webhook` maximum response size to 10MB.
diff --git a/.changeset/short-starfishes-provide.md b/.changeset/short-starfishes-provide.md
new file mode 100644
index 0000000000000..2d70c789a69cd
--- /dev/null
+++ b/.changeset/short-starfishes-provide.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": minor
+"@rocket.chat/rest-typings": minor
+---
+
+Add OpenAPI support for the Rocket.Chat e2e.fetchMyKeys endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/spicy-drinks-carry.md b/.changeset/spicy-drinks-carry.md
new file mode 100644
index 0000000000000..1b2119694d4cc
--- /dev/null
+++ b/.changeset/spicy-drinks-carry.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat push.test API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/sweet-terms-relax.md b/.changeset/sweet-terms-relax.md
new file mode 100644
index 0000000000000..8861e65f43250
--- /dev/null
+++ b/.changeset/sweet-terms-relax.md
@@ -0,0 +1,7 @@
+---
+'@rocket.chat/meteor': minor
+'@rocket.chat/core-typings': minor
+'@rocket.chat/rest-typings': minor
+---
+
+Add OpenAPI support for the Rocket.Chat custom-user-status.list API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation
diff --git a/.changeset/swift-badgers-try.md b/.changeset/swift-badgers-try.md
new file mode 100644
index 0000000000000..368d41a127c83
--- /dev/null
+++ b/.changeset/swift-badgers-try.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/rest-typings': minor
+'@rocket.chat/meteor': minor
+---
+
+Add OpenAPI support for the Rocket.Chat e2e endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/tame-dolphins-draw.md b/.changeset/tame-dolphins-draw.md
new file mode 100644
index 0000000000000..f42810fac68ce
--- /dev/null
+++ b/.changeset/tame-dolphins-draw.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/meteor": patch
+---
+
+Fixes `inquiries.take` not failing when attempting to take a chat while over chat limits
diff --git a/.changeset/tame-humans-greet.md b/.changeset/tame-humans-greet.md
new file mode 100644
index 0000000000000..e5b0aa45eece6
--- /dev/null
+++ b/.changeset/tame-humans-greet.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/meteor": patch
+---
+
+Fixes an issue where `Production` flag was not being respected when initializing Push Notifications configuration
diff --git a/.changeset/tame-tables-complain.md b/.changeset/tame-tables-complain.md
new file mode 100644
index 0000000000000..2d8c05a3f1432
--- /dev/null
+++ b/.changeset/tame-tables-complain.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes "Join" button on Outlook Calendar bubbling click event, also opening the calendar event details.
diff --git a/.changeset/tender-papayas-jam.md b/.changeset/tender-papayas-jam.md
new file mode 100644
index 0000000000000..d9e85e6d29425
--- /dev/null
+++ b/.changeset/tender-papayas-jam.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Limits Omnichannel webhook maximum response size to 10MB.
diff --git a/.changeset/tough-steaks-beam.md b/.changeset/tough-steaks-beam.md
new file mode 100644
index 0000000000000..cd0263fb496ed
--- /dev/null
+++ b/.changeset/tough-steaks-beam.md
@@ -0,0 +1,5 @@
+---
+'@rocket.chat/meteor': patch
+---
+
+Fixes reactivity of Custom Sounds and Custom Emojis storage settings
diff --git a/.changeset/tricky-boxes-type.md b/.changeset/tricky-boxes-type.md
new file mode 100644
index 0000000000000..084f3f79fe242
--- /dev/null
+++ b/.changeset/tricky-boxes-type.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat rooms.favorite API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/twenty-colts-flash.md b/.changeset/twenty-colts-flash.md
new file mode 100644
index 0000000000000..93729a19533f6
--- /dev/null
+++ b/.changeset/twenty-colts-flash.md
@@ -0,0 +1,6 @@
+---
+'@rocket.chat/rest-typings': minor
+'@rocket.chat/meteor': minor
+---
+
+Adds new `custom-sounds.getOne` REST endpoint to retrieve a single custom sound by `_id` and updates client to consume it.
diff --git a/.changeset/unlucky-impalas-matter.md b/.changeset/unlucky-impalas-matter.md
new file mode 100644
index 0000000000000..ed56575dcec7c
--- /dev/null
+++ b/.changeset/unlucky-impalas-matter.md
@@ -0,0 +1,5 @@
+---
+"@rocket.chat/meteor": patch
+---
+
+Fixes `ssrf` validation for OAuth endpoints, which allowed internal endpoints to be used during the auth flow.
diff --git a/.changeset/weak-terms-shave.md b/.changeset/weak-terms-shave.md
new file mode 100644
index 0000000000000..1813edcdb2b5b
--- /dev/null
+++ b/.changeset/weak-terms-shave.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat emoji-custom.create API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/wet-roses-call.md b/.changeset/wet-roses-call.md
new file mode 100644
index 0000000000000..88cdcdb45362e
--- /dev/null
+++ b/.changeset/wet-roses-call.md
@@ -0,0 +1,7 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+"@rocket.chat/core-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat commands.get API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.changeset/wicked-buckets-thank.md b/.changeset/wicked-buckets-thank.md
new file mode 100644
index 0000000000000..cc6f8af59fcce
--- /dev/null
+++ b/.changeset/wicked-buckets-thank.md
@@ -0,0 +1,6 @@
+---
+"@rocket.chat/meteor": patch
+"@rocket.chat/rest-typings": patch
+---
+
+Add OpenAPI support for the Rocket.Chat dm.close/im.close API endpoints by migrating to a modern chained route definition syntax and utilizing shared AJV schemas for validation to enhance API documentation and ensure type safety through response validation.
diff --git a/.cursor/files/test-cases.json b/.cursor/files/test-cases.json
new file mode 100644
index 0000000000000..1e8d0437bb461
--- /dev/null
+++ b/.cursor/files/test-cases.json
@@ -0,0 +1 @@
+{"cases":[],"suites":[{"id":606,"title":"Rocket.Chat","description":null,"preconditions":null,"suites":[{"id":6,"title":"Admin","description":null,"preconditions":null,"suites":[{"id":995,"title":"Login methods","description":null,"preconditions":null,"suites":[{"id":302,"title":"LDAP","description":null,"preconditions":null,"suites":[{"id":21,"title":"LDAP","description":null,"preconditions":null,"suites":[{"id":27,"title":"Data Sync Basic User","description":null,"preconditions":null,"suites":[],"cases":[{"id":3158,"title":"Group Filter: Load only users that are in a specific group.","description":null,"preconditions":"Have LDAP enable","postconditions":null,"priority":"medium","severity":"major","type":"functional","behavior":"positive","automation":"to-be-automated","status":"actual","is_flaky":"no","layer":"api","milestone":null,"custom_fields":[],"steps_type":"classic","steps":[{"position":1,"action":"Go to apache > New Entry > GroupofNames","expected_result":"a new group is going to be created","data":"","steps":[]},{"position":2,"action":"Add a member to the group","expected_result":"able to add","data":"","steps":[]},{"position":3,"action":"Go to Rocket.chat > workspace > settings > ldap > User search > Group Filter click enable","expected_result":"able to enable","data":"","steps":[]},{"position":4,"action":"On Group ObjectClass fill with groupOfNames","expected_result":"able to fill","data":"","steps":[]},{"position":5,"action":"on Group Member Attribute fill with member","expected_result":"able to fill","data":"","steps":[]},{"position":6,"action":"on Group Group Member Format fill with uid=#{username},ou=users,dc=rcldap,dc=com,dc=br","expected_result":"able to fill","data":"","steps":[]},{"position":7,"action":"on Group name","expected_result":"put rc","data":"","steps":[]},{"position":8,"action":"Click on save","expected_result":"Able to save","data":"","steps":[]},{"position":9,"action":"Click on Sync","expected_result":"able to 
sync","data":"","steps":[]},{"position":10,"action":"Go to users","expected_result":"Only the members of the group should be on the list.","data":"","steps":[]}],"tags":[],"params":[],"is_muted":"no"}]}],"cases":[]}],"cases":[{"id":2657,"title":"Sync users from oAuth with LDAP","description":null,"preconditions":null,"postconditions":null,"priority":"undefined","severity":"normal","type":"other","behavior":"undefined","automation":"is-not-automated","status":"actual","is_flaky":"no","layer":"unknown","milestone":null,"custom_fields":[],"steps_type":"classic","steps":[{"position":1,"action":"Go to **Workspace > Settings > LDAP > Enterprise**","expected_result":"","data":"","steps":[]},{"position":2,"action":"Disable \"Background Sync Import New Users\"","expected_result":"","data":"","steps":[]},{"position":3,"action":"Enable \"Background Sync Update Existing Users\" setting","expected_result":"","data":"","steps":[]},{"position":4,"action":"Go to \"Data Sync\" tab","expected_result":"","data":"","steps":[]},{"position":5,"action":"Enable \"Merge existent users\"","expected_result":"","data":"","steps":[]},{"position":6,"action":"On another browser window, login for the first time with an user via OAuth","expected_result":"","data":"","steps":[]},{"position":7,"action":"On the LDAP, change the information from the user","expected_result":"","data":"e.g: Change the name and the avatar","steps":[]},{"position":8,"action":"Go to **Workspace > Settings > LDAP > Enterprise**","expected_result":"","data":"","steps":[]},{"position":9,"action":"Click on \"Sync Now\"","expected_result":"","data":"","steps":[]},{"position":10,"action":"Go to Workspace -> Users","expected_result":"","data":"","steps":[]},{"position":11,"action":"Validate the information from the oAuth User","expected_result":"The name and avatar should be updated to match with 
LDAP","data":"","steps":[]}],"tags":[],"params":[],"is_muted":"no"}]}],"cases":[]},{"id":593,"title":"Settings","description":null,"preconditions":null,"suites":[{"id":804,"title":"Accounts","description":null,"preconditions":null,"suites":[{"id":805,"title":"Registration","description":null,"preconditions":null,"suites":[],"cases":[{"id":2969,"title":"Enable Manually Approve New Users","description":null,"preconditions":null,"postconditions":null,"priority":"high","severity":"major","type":"functional","behavior":"positive","automation":"automated","status":"actual","is_flaky":"no","layer":"e2e","milestone":null,"custom_fields":[],"steps_type":"classic","steps":[{"position":1,"action":"Go to Account > Registration","expected_result":"Able to go","data":"","steps":[]},{"position":2,"action":"Enable the toggle \"Manually enable new users\"","expected_result":"Able to enable","data":"","steps":[]},{"position":3,"action":"Click Save changes","expected_result":"Able to save","data":"","steps":[]},{"position":4,"action":"Make logout","expected_result":"Able to logout","data":"","steps":[]},{"position":5,"action":"Click Create User","expected_result":"Goes to the page create users","data":"","steps":[]},{"position":6,"action":"Fill all the required fields","expected_result":"Able to fill","data":"","steps":[]},{"position":7,"action":"Click Join team","expected_result":"A toast is gonna appear \"Before you can log in, your account must be manually activated by an administrator\"","data":"","steps":[]},{"position":8,"action":"On the login page, fill the username and Password with the user that you just create","expected_result":"A message is gonna appear \"Before you can login, your account must be manually activated by an administrator\"","data":"","steps":[]},{"position":9,"action":"Log in with a Admin","expected_result":"Able to log in","data":"","steps":[]},{"position":10,"action":"Go to Workspace > Users","expected_result":"Able to open the 
page","data":"","steps":[]},{"position":11,"action":"Find the user that you just create and click","expected_result":"A side menu is going to show","data":"","steps":[]},{"position":12,"action":"Click on the three dots and then click in Activate","expected_result":"The status of user is gonna change from diable to offline","data":"","steps":[]},{"position":13,"action":"Logout","expected_result":"Able to logout","data":"","steps":[]},{"position":14,"action":"Try again login with the user that you created","expected_result":"is redirect to the home page.","data":"","steps":[]}],"tags":[],"params":[],"is_muted":"no"}]}],"cases":[]},{"id":981,"title":"Messages","description":null,"preconditions":null,"suites":[],"cases":[{"id":3517,"title":"Validate auto cleanup functionality on OEmbed cache","description":null,"preconditions":null,"postconditions":null,"priority":"undefined","severity":"normal","type":"other","behavior":"undefined","automation":"is-not-automated","status":"actual","is_flaky":"no","layer":"unknown","milestone":null,"custom_fields":[{"id":1,"internal_id":1,"title":"Automation/Manual Link","type":1,"value":"Not Automated"}],"steps_type":"classic","steps":[{"position":1,"action":"Send some messages containing links with previews","expected_result":"","data":"","steps":[]},{"position":2,"action":"Go to\u00a0**Workspace > Settings > Message**","expected_result":"","data":"","steps":[]},{"position":3,"action":"Set the Embed Cache Expiration Days setting to a specific value (e.g., 0 days).","expected_result":"","data":"","steps":[]},{"position":4,"action":"Change the Cron job value in the code","expected_result":"","data":"The cron job runs everydat at 2:24am (this can be changed in\u00a0[this line](https://github.com/RocketChat/Rocket.Chat/pull/31336/files#diff-21161a95fbf3394faa4f2af4add5e90d8eeeb279694d3107c86f834c41059e99R6)\u00a0for test purposes -- use it as\u00a0`* * * * *`\u00a0to run every minute, for 
example).","steps":[]},{"position":5,"action":"Wait the time set for the cron job to run","expected_result":"A cron job to erase all OEmbed cache registries older than\u00a0`Embed Cache Expiration`\u00a0days should be erased from the database (there should be no document in the\u00a0`rocketchat_oembed_cache collection`\u00a0with an\u00a0`updatedAt`\u00a0field older than defined in the setting).","data":"","steps":[]}],"tags":[],"params":[],"is_muted":"no"}]}],"cases":[]}],"cases":[]},{"id":607,"title":"Chat","description":null,"preconditions":null,"suites":[{"id":272,"title":"Messages","description":"General Test Cases for using the message composer and dealing with messages in the chat of a channel","preconditions":null,"suites":[],"cases":[{"id":2280,"title":"Attachment description and attachment preview size","description":"In this task fixed the bug where the attachment description would break to match the preview attachment size. In this test case, we want to make sure the attachment is not breaking regardless of the room type.","preconditions":"* You need to have an image file in your computer","postconditions":null,"priority":"undefined","severity":"normal","type":"other","behavior":"undefined","automation":"is-not-automated","status":"actual","is_flaky":"no","layer":"unknown","milestone":null,"custom_fields":[],"steps_type":"classic","steps":[{"position":1,"action":"Login the server","expected_result":"","data":"","steps":[]},{"position":2,"action":"Create a room","expected_result":"","data":"","steps":[]},{"position":3,"action":"Open the room and on the message composer, click on \u201c+\u201d \u2192 \u201cComputer\u201d","expected_result":"","data":"","steps":[]},{"position":4,"action":"Select an image from your computer","expected_result":"","data":"","steps":[]},{"position":5,"action":"Add the file description as a long text","expected_result":"","data":"You can use the text generated by this website: 
https://www.lipsum.com/","steps":[]},{"position":6,"action":"Send the image","expected_result":"The text should fill in the screen and not break lines before it","data":"","steps":[]}],"tags":[],"params":[{"title":"room","values":["channel","team","direct message","discussion"]}],"is_muted":"no"}]}],"cases":[]}],"cases":[]}]}
\ No newline at end of file
diff --git a/.cursor/rules/playwright.mdc b/.cursor/rules/playwright.mdc
new file mode 100644
index 0000000000000..1276ee2ce715d
--- /dev/null
+++ b/.cursor/rules/playwright.mdc
@@ -0,0 +1,67 @@
+---
+description:
+globs:
+alwaysApply: false
+---
+# Cursor AI Rules for Playwright Test Development
+
+## Context & Role
+You are a Senior QA Automation Engineer with deep expertise in:
+- TypeScript/JavaScript development
+- Playwright end-to-end testing framework
+- Frontend/Backend testing strategies
+- Rocket.Chat application testing
+
+## Code Quality Standards
+- Write concise, technical TypeScript/JavaScript with accurate typing
+- Use descriptive test names that clearly communicate expected behavior
+- Follow DRY (Don't Repeat Yourself) principles by extracting reusable logic into helper functions
+- Avoid code comments in the implementation
+
+## File Structure & Organization
+- **Test Location**: All test files must be created in `apps/meteor/tests/e2e/`
+- **Page Objects**: Utilize existing page objects pattern from `apps/meteor/tests/e2e/page-objects/`
+- **File Naming**: Use `.spec.ts` extension (e.g., `login.spec.ts`)
+- **Configuration**: Reference `playwright.config.ts` for global settings
+
+## Playwright Best Practices
+
+### Locator Strategy
+- **Avoid using `page.locator()`** - always prefer semantic locators, such as:
+ - `page.getByRole()` for interactive elements
+ - `page.getByLabel()` for form fields
+ - `page.getByText()` for text content
+ - `page.getByTitle()` for titled elements
+- Store commonly used locators in variables/constants for reuse
+
+### Test Structure
+- Use `test.beforeAll()` and `test.afterAll()` for setup/teardown
+- Use `test.step()` for complex test scenarios to improve organization
+- Group related tests in the same file
+- Utilize Playwright fixtures (`test`, `page`, `expect`) for consistency
+
+### Assertions & Waiting
+- Prefer to use web-first assertions (`toBeVisible`, `toHaveText`, etc.) whenever possible
+- Use `expect` matchers for assertions (`toEqual`, `toContain`, `toBeTruthy`, `toHaveLength`, etc.) that can be used to assert any conditions and avoid using `assert` statements
+- Use explicit wait methods (e.g. `locator.waitFor()`, `page.waitForFunction()`) with specific conditions instead of hardcoded timeouts
+- Implement proper wait strategies for dynamic content
+
+### Architecture Patterns
+- Follow Page Object Model pattern consistently
+- Maintain test isolation between test cases
+- Ensure clean state for each test execution
+- Ensure tests run reliably in parallel without shared state conflicts
+- Reuse existing test files when appropriate, create new ones when needed
+
+## Reference Documentation
+- Primary: [Playwright Testing Guide](mdc:https:/playwright.dev/docs/writing-tests)
+- Secondary: [Rocket.Chat Documentation](mdc:https:/docs.rocket.chat/docs/rocketchat)
+
+## Expected Output Format
+When generating tests, provide:
+1. Complete, runnable TypeScript test files
+2. Proper import statements and dependencies
+3. Well-structured test suites with clear describe/test blocks
+4. Implementation that follows all above guidelines without deviation
+
+Focus on creating maintainable, reliable end-to-end tests that accurately reflect user workflows and system behavior.
diff --git a/.cursor/rules/test-cases.mdc b/.cursor/rules/test-cases.mdc
new file mode 100644
index 0000000000000..71afc52f71941
--- /dev/null
+++ b/.cursor/rules/test-cases.mdc
@@ -0,0 +1,63 @@
+---
+description:
+globs:
+alwaysApply: false
+---
+# Cursor AI Rules for Manual Test Case Creation
+
+## Context & Role
+You are a Senior QA Engineer at Rocket.Chat, responsible for designing high-quality manual test cases that ensure product stability and comprehensive feature coverage. You deliver clear, concise tests that enable effective validation and maintain consistent quality standards.
+
+## Required Context Files
+**MANDATORY**: Always load these files into context before creating test cases:
+- [test-cases.json](mdc:.cursor/files/test-cases.json) - Reference format and existing test case structures
+
+## Test Case Standards
+
+### Quality Requirements
+- Write test cases in Markdown format following the standardized template
+- Include ALL necessary components: Title, Description, Preconditions, Type, Steps, and Expected Result
+- Ensure steps are clear, concise, and reproducible by any team member
+- Keep naming consistent and easy to search or filter in reports
+- Focus on comprehensive feature coverage and edge case validation
+
+### Test Type Classification
+Define the most appropriate test type for each scenario:
+- **API**: Backend service testing, data validation, integration points
+- **E2E**: Complete user workflows, cross-system functionality
+- **Unit**: Individual component or function testing
+
+### Content Guidelines
+- Use descriptive, searchable titles that clearly identify the feature being tested
+- Write concise descriptions that explain the test's purpose
+- List specific preconditions required before test execution
+- Create step-by-step instructions that any team member can follow
+- Define clear, measurable expected results
+
+## Standard Test Case Format
+
+```markdown
+## Test Case: [Descriptive Title]
+**Description**: [Short, clear description of what is being tested]
+**Preconditions**: [List of required setup conditions]
+**Type**: [api/e2e/unit]
+
+**Steps**:
+1. [step 1]
+2. [step 2]
+**Expected Result**: [Specific, measurable expected outcome]
+```
+
+## Reference Documentation
+- Primary: [Rocket.Chat Documentation](https://docs.rocket.chat/docs/rocketchat)
+- Context: Use provided reference files for implementation guidance
+
+## Expected Output Format
+When creating test cases, provide:
+1. Complete test cases following the exact markdown format
+2. Appropriate test type classification based on scope
+3. Comprehensive step coverage without gaps
+4. Clear, actionable instructions for manual execution
+5. Specific expected results that can be validated
+
+Focus on creating test cases that can later be converted into automated tests while ensuring thorough manual validation coverage.
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index b98cde5f50299..699185914302b 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -17,7 +17,6 @@
/apps/meteor/tests/unit/server @RocketChat/backend
/apps/meteor/app/apps/ @RocketChat/apps
/apps/meteor/app/livechat @RocketChat/omnichannel
-/apps/meteor/app/voip @RocketChat/omnichannel
/apps/meteor/app/sms @RocketChat/omnichannel
/apps/meteor/app/api @RocketChat/backend
/apps/meteor/app/federation @RocketChat/backend
@@ -27,12 +26,9 @@
/packages/models @RocketChat/Architecture
apps/meteor/server/startup/migrations @RocketChat/Architecture
/apps/meteor/packages/rocketchat-livechat @RocketChat/omnichannel
-/apps/meteor/server/services/voip-asterisk @RocketChat/omnichannel
-/apps/meteor/server/services/omnichannel-voip @RocketChat/omnichannel
/apps/meteor/server/features/EmailInbox @RocketChat/omnichannel
/apps/meteor/ee/app/canned-responses @RocketChat/omnichannel
/apps/meteor/ee/app/livechat @RocketChat/omnichannel
/apps/meteor/ee/app/livechat-enterprise @RocketChat/omnichannel
/apps/meteor/client/omnichannel @RocketChat/omnichannel
/apps/meteor/client/components/omnichannel @RocketChat/omnichannel
-/apps/meteor/client/components/voip @RocketChat/omnichannel
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index aa990adb6ee7f..85a336507712e 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -18,7 +18,7 @@ __Note:__ If there's a feature you'd like, there's a bug you'd like to fix, or y
## Development Guidelines
-Check out our Handbook for the [Development Guidelines](https://handbook.rocket.chat/product/development/development-guidelines) on how to set up your environment, do code, test and push your code. There you find our patterns on how to compose your Pull Requests' titles to have your contribution accepted.
+Check out our Handbook for the [Development Guidelines](https://handbook.rocket.chat/space/RnD/359891343/%F0%9F%8C%90+Development+Guidelines) on how to set up your environment, do code, test and push your code. There you find our patterns on how to compose your Pull Requests' titles to have your contribution accepted.
## Contributor License Agreement
diff --git a/.github/actions/build-docker/action.yml b/.github/actions/build-docker/action.yml
index 5bc982877ad0f..1bc8542e20464 100644
--- a/.github/actions/build-docker/action.yml
+++ b/.github/actions/build-docker/action.yml
@@ -1,41 +1,34 @@
-name: 'Meteor Docker'
+name: 'Build Docker'
inputs:
CR_USER:
required: true
CR_PAT:
required: true
- node-version:
- required: true
- description: 'Node version'
- type: string
deno-version:
required: true
description: 'Deno version'
type: string
- platform:
+ arch:
required: false
- description: 'Platform'
- default: 'alpine'
- build-containers:
+ description: 'Architecture'
+ default: 'arm64'
+ service:
required: false
- description: 'Containers to build along with Rocket.Chat'
+ description: 'Container to build'
type: string
- turbo-cache:
- required: false
- description: 'Enable turbo cache'
- default: 'true'
publish-image:
required: false
description: 'Publish image'
default: 'true'
- setup:
+ setup-docker:
required: false
- description: 'Setup node.js'
- default: 'true'
- NPM_TOKEN:
+ description: 'Setup Docker'
+ default: true
+ type:
required: false
- description: 'NPM token'
+ description: 'production or coverage'
+ default: 'coverage'
runs:
using: composite
@@ -49,66 +42,138 @@ runs:
username: ${{ inputs.CR_USER }}
password: ${{ inputs.CR_PAT }}
- - name: Restore build
- uses: actions/download-artifact@v4
+ - name: Restore meteor build
+ if: inputs.service == 'rocketchat'
+ uses: actions/download-artifact@v6
with:
- name: build
+ name: build-${{ inputs.type }}
path: /tmp/build
- - name: Unpack build
+ - name: Unpack meteor build
+ if: inputs.service == 'rocketchat'
shell: bash
run: |
cd /tmp/build
tar xzf Rocket.Chat.tar.gz
rm Rocket.Chat.tar.gz
- - uses: rharkor/caching-for-turbo@v1.5
- # if we are testing a PR from a fork, we already called the turbo cache at this point, so it should be false
- if: inputs.turbo-cache == 'true'
-
- - name: Setup NodeJS
- uses: ./.github/actions/setup-node
- if: inputs.setup == 'true'
- with:
- node-version: ${{ inputs.node-version }}
- deno-version: ${{ inputs.deno-version }}
- cache-modules: true
- install: true
- NPM_TOKEN: ${{ inputs.NPM_TOKEN }}
-
- - name: Restore turbo build
- uses: actions/download-artifact@v4
+ - name: Set up Docker
+ if: inputs.setup-docker == true
+ uses: docker/setup-docker-action@v4
with:
- name: turbo-build
- path: .turbo/cache
-
- - run: yarn build
- if: inputs.setup == 'true'
- shell: bash
- - if: ${{ inputs.platform == 'alpine' }}
- uses: actions/cache@v4
- with:
- path: /tmp/build/matrix-sdk-crypto.linux-x64-musl.node
- key: matrix-rust-sdk-crypto-nodejs-v0.2.0-beta.1
+ daemon-config: |
+ {
+ "debug": false,
+ "features": {
+ "containerd-snapshotter": true
+ }
+ }
- name: Build Docker images
shell: bash
run: |
- args=(rocketchat ${{ inputs.build-containers }})
-
+ set -o xtrace
export DENO_VERSION="${{ inputs.deno-version }}"
- docker compose -f docker-compose-ci.yml build "${args[@]}"
+ # Removes unnecessary swc cores and sharp binaries to reduce image size
+ swc_arch='x64'
+ if [[ "${{ inputs.service }}" == 'rocketchat' ]]; then
+ if [[ "${{ inputs.arch }}" == 'arm64' ]]; then
+ swc_arch='arm64'
+ fi
- - name: Publish Docker images to GitHub Container Registry
- if: inputs.publish-image == 'true' && github.actor != 'dependabot[bot]' && (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'release' || github.ref == 'refs/heads/develop')
+ find /tmp/build/bundle/programs/server/npm/node_modules/meteor/babel-compiler/node_modules/@meteorjs/swc-core/.swc/node_modules/@swc -type d -name 'core-*' -not -name "*linux-${swc_arch}-gnu*" -exec rm -rf {} +
+
+ find /tmp/build/bundle/programs/server/npm/node_modules/@img -type d -name 'sharp-*' -not -name "*-linuxmusl-${swc_arch}" -exec rm -rf {} +
+
+ find /tmp/build/bundle/programs/server/npm/node_modules/@napi-rs -type d -name 'pinyin-linux-*' -not -name "*-linux-${swc_arch}-*" -exec rm -rf {} +
+
+ find /tmp/build/bundle/programs/server/npm/node_modules/@esbuild -type d -name 'linux-*' -not -name "*-${swc_arch}" -exec rm -rf {} +
+
+ find /tmp/build/bundle/programs/server/npm/node_modules/@rocket.chat/apps-engine/node_modules/@esbuild -type d -name 'linux-*' -not -name "*-${swc_arch}" -exec rm -rf {} +
+ fi
+
+ if [[ "${{ inputs.publish-image }}" == 'true' ]]; then
+ LOAD_OR_PUSH="--push"
+ else
+ LOAD_OR_PUSH="--load"
+ fi
+
+ # Get image name from docker-compose-ci.yml since rocketchat image is different from service name (rocket.chat)
+ IMAGE=$(docker compose -f docker-compose-ci.yml config --format json 2>/dev/null | jq -r --arg s "${{ inputs.service }}" '.services[$s].image')
+
+ docker buildx bake \
+ -f docker-compose-ci.yml \
+ ${LOAD_OR_PUSH} \
+ --allow=fs.read=/tmp/build \
+ --set "*.tags+=${IMAGE}-gha-run-${{ github.run_id }}" \
+ --set "*.labels.org.opencontainers.image.description=Build run: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" \
+ --set "*.labels.org.opencontainers.image.source=${{ github.server_url }}/${{ github.repository }}" \
+ --set *.platform=linux/${{ inputs.arch }} \
+ --set *.cache-from=type=gha \
+ --set *.cache-to=type=gha,mode=max \
+ --provenance=false \
+ --sbom=false \
+ --metadata-file "/tmp/meta.json" \
+ "${{ inputs.service }}"
+
+ echo "Contents of /tmp/meta.json:"
+ cat /tmp/meta.json
+
+ if [[ "${{ inputs.publish-image }}" == 'true' ]]; then
+ SERVICE_SUFFIX=${{ inputs.service == 'rocketchat' && inputs.type == 'coverage' && (github.event_name == 'release' || github.ref == 'refs/heads/develop') && '-cov' || '' }}
+
+ mkdir -p /tmp/manifests/${{ inputs.service }}${SERVICE_SUFFIX}/${{ inputs.arch }}
+
+ # Get digest and image info
+ DIGEST=$(jq -r '.["${{ inputs.service }}"].["containerimage.digest"]' "/tmp/meta.json")
+ IMAGE_NO_TAG=$(echo "$IMAGE" | sed 's/:.*$//')
+ FULL_IMAGE="${IMAGE_NO_TAG}@${DIGEST}"
+
+ echo "Inspecting image: $FULL_IMAGE"
+
+ # Inspect the image and save complete manifest with sizes (using -v for verbose)
+ docker manifest inspect -v "$FULL_IMAGE" > "/tmp/manifests/${{ inputs.service }}${SERVICE_SUFFIX}/${{ inputs.arch }}/manifest.json"
+
+ echo "Saved manifest to /tmp/manifests/${{ inputs.service }}${SERVICE_SUFFIX}/${{ inputs.arch }}/manifest.json"
+ cat "/tmp/manifests/${{ inputs.service }}${SERVICE_SUFFIX}/${{ inputs.arch }}/manifest.json" | jq '.'
+ fi
+
+ - name: Save Docker image as artifact
+ if: inputs.publish-image == 'false' && inputs.arch == 'amd64'
shell: bash
run: |
- args=(rocketchat ${{ inputs.build-containers }})
+ set -o xtrace
+
+ # Get image name from docker-compose-ci.yml
+ IMAGE=$(docker compose -f docker-compose-ci.yml config --format json 2>/dev/null | jq -r --arg s "${{ inputs.service }}" '.services[$s].image')
+
+ # Create directory for image archives
+ mkdir -p /tmp/docker-images
+
+ # Save the image to a tar file
+ docker save "${IMAGE}" -o "/tmp/docker-images/${{ inputs.service }}-${{ inputs.arch }}-${{ inputs.type }}.tar"
- docker compose -f docker-compose-ci.yml push "${args[@]}"
+ echo "Saved image to /tmp/docker-images/${{ inputs.service }}-${{ inputs.arch }}-${{ inputs.type }}.tar"
+ ls -lh /tmp/docker-images/
+
+ - name: Upload Docker image artifact
+ if: inputs.publish-image == 'false' && inputs.arch == 'amd64'
+ uses: actions/upload-artifact@v4
+ with:
+ name: docker-image-${{ inputs.service }}-${{ inputs.arch }}-${{ inputs.type }}
+ path: /tmp/docker-images/${{ inputs.service }}-${{ inputs.arch }}-${{ inputs.type }}.tar
+ retention-days: 1
+
+ - uses: actions/upload-artifact@v4
+ if: inputs.publish-image == 'true'
+ with:
+ name: manifests-${{ inputs.service }}-${{ inputs.arch }}-${{ inputs.type }}
+ path: /tmp/manifests
+ retention-days: 5
- name: Clean up temporary files
+ if: inputs.service == 'rocketchat'
shell: bash
run: |
- sudo rm -rf /tmp/bundle
+ sudo rm -rf /tmp/build
diff --git a/.github/actions/docker-image-size-tracker/action.yml b/.github/actions/docker-image-size-tracker/action.yml
new file mode 100644
index 0000000000000..5edd90d3bab21
--- /dev/null
+++ b/.github/actions/docker-image-size-tracker/action.yml
@@ -0,0 +1,580 @@
+name: 'Docker Image Size Tracker'
+description: 'Track and report Docker image sizes in Pull Requests'
+author: 'Rocket.Chat'
+
+inputs:
+ github-token:
+ description: 'GitHub token for commenting on PRs'
+ required: true
+ ci-pat:
+ description: 'GitHub token for committing to history branch'
+ required: true
+ registry:
+ description: 'Container registry (e.g., ghcr.io)'
+ required: false
+ default: 'ghcr.io'
+ repository:
+ description: 'Repository name (e.g., rocketchat)'
+ required: true
+ tag:
+ description: 'Image tag to measure'
+ required: true
+ baseline-tag:
+ description: 'Baseline tag to compare against'
+ required: false
+ default: 'develop'
+ platform:
+ description: 'Platform architecture to compare (amd64 or arm64)'
+ required: false
+ default: 'amd64'
+ size-thresholds:
+ description: 'Optional JSON: per-image trigger thresholds. Only comment when an image increase exceeds its threshold. Example: {"rocketchat":{"mb":50,"percent":5},"omnichannel":{"mb":10,"percent":2}}'
+ required: false
+ default: ''
+ fail-thresholds:
+ description: 'Optional JSON: per-image fail thresholds. Task fails when an image increase exceeds its threshold (mb and/or percent). Example: {"rocketchat":{"mb":100,"percent":15}}'
+ required: false
+ default: ''
+
+outputs:
+ total-size:
+ description: 'Total size in bytes'
+ value: ${{ steps.measure.outputs.total-size }}
+ size-diff:
+ description: 'Size difference in bytes'
+ value: ${{ steps.compare.outputs.size-diff }}
+ size-diff-percent:
+ description: 'Size difference percentage'
+ value: ${{ steps.compare.outputs.size-diff-percent }}
+ comment-triggered:
+ description: 'Whether to post PR comment (only when size is bigger and thresholds met)'
+ value: ${{ steps.compare.outputs.comment-triggered }}
+ failed:
+ description: 'True if image size exceeded fail-thresholds'
+ value: ${{ steps.compare.outputs.failed }}
+
+runs:
+ using: 'composite'
+ steps:
+ - name: Download manifests
+ uses: actions/download-artifact@v6
+ with:
+ pattern: manifests-*
+ path: /tmp/manifests
+ merge-multiple: true
+
+ - name: Measure image sizes from artifacts
+ id: measure
+ shell: bash
+ env:
+ PLATFORM: ${{ inputs.platform }}
+ TAG: ${{ inputs.tag }}
+ run: |
+ echo "Reading image sizes from build artifacts for platform: $PLATFORM"
+
+ declare -A sizes
+ declare -a services_list
+ total=0
+
+ # Loop through service directories (same as publish workflow)
+ shopt -s nullglob
+ for service_dir in /tmp/manifests/*; do
+ [[ -d "$service_dir" ]] || continue
+ service="$(basename "$service_dir")"
+
+ echo "Processing service: $service"
+ services_list+=("$service")
+
+ size=0
+ # Read only the specified platform architecture
+ manifest_file="$service_dir/$PLATFORM/manifest.json"
+ if [[ -f "$manifest_file" ]]; then
+ # Docker manifest inspect -v returns SchemaV2Manifest with sizes
+ # Extract config size and layer sizes
+ config_size=$(jq -r '.SchemaV2Manifest.config.size // 0' "$manifest_file")
+ layers_size=$(jq '[.SchemaV2Manifest.layers[]?.size // 0] | add // 0' "$manifest_file")
+ size=$((config_size + layers_size))
+
+ echo " → Found $manifest_file: $size bytes (config: $config_size, layers: $layers_size)"
+ else
+ echo " ⚠ Manifest not found for platform $PLATFORM: $manifest_file"
+ fi
+
+ sizes[$service]=$size
+ total=$((total + size))
+ done
+
+ echo "Total size (all services, $PLATFORM only): $total bytes"
+ echo "total-size=$total" >> $GITHUB_OUTPUT
+
+ # Save to JSON
+ echo "{" > current-sizes.json
+ echo " \"timestamp\": \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"," >> current-sizes.json
+ echo " \"tag\": \"$TAG\"," >> current-sizes.json
+ echo " \"total\": $total," >> current-sizes.json
+ echo " \"services\": {" >> current-sizes.json
+
+ first=true
+ for service in "${services_list[@]}"; do
+ if [[ "$first" == "true" ]]; then
+ first=false
+ else
+ echo "," >> current-sizes.json
+ fi
+ echo " \"$service\": ${sizes[$service]}" >> current-sizes.json
+ done
+
+ echo " }" >> current-sizes.json
+ echo "}" >> current-sizes.json
+
+ echo "Current sizes saved:"
+ cat current-sizes.json
+
+ # Save services list for baseline measurement
+ printf '%s\n' "${services_list[@]}" > /tmp/services-list.txt
+
+ # Measures the size of every service image at the baseline tag so the
+ # "Compare and generate report" step can diff current vs. baseline.
+ # Mirrors the current-tag measurement above, but reads the service list
+ # produced by that step from /tmp/services-list.txt.
+ - name: Measure baseline
+ id: baseline
+ shell: bash
+ # Baseline images may not exist yet (first run / new baseline tag):
+ # tolerate failure instead of failing the whole job.
+ continue-on-error: true
+ env:
+ REGISTRY: ${{ inputs.registry }}
+ ORG: ${{ inputs.repository }}
+ TAG: ${{ inputs.baseline-tag }}
+ PLATFORM: ${{ inputs.platform }}
+ run: |
+ echo "Measuring baseline: $REGISTRY/$ORG/*:$TAG (platform: $PLATFORM)"
+
+ # sizes: service name -> size in bytes; total accumulates across services.
+ declare -A sizes
+ declare -a services_list
+ total=0
+
+ # Read services list from current measurement
+ while IFS= read -r service; do
+ services_list+=("$service")
+
+ # Map service name to image name (handle rocketchat -> rocket.chat)
+ if [[ "$service" == "rocketchat" ]] || [[ "$service" == "rocketchat-cov" ]]; then
+ image_name="rocket.chat"
+ [[ "$service" == "rocketchat-cov" ]] && image_name="rocket.chat-cov"
+ else
+ image_name="$service"
+ fi
+
+ image="$REGISTRY/$ORG/$image_name:$TAG"
+ echo " → Inspecting $image"
+
+ # size stays 0 when the image/manifest cannot be inspected, so a
+ # missing baseline image is recorded as 0 bytes rather than an error.
+ size=0
+ if manifest=$(docker manifest inspect "$image" 2>/dev/null); then
+ # Check if it's a manifest list (multi-arch)
+ if echo "$manifest" | jq -e '.manifests' > /dev/null 2>&1; then
+ # Manifest list - find the specified platform
+ echo " → Multi-arch manifest detected, filtering for $PLATFORM"
+ # NOTE(review): $PLATFORM is compared against .platform.architecture
+ # (e.g. "amd64"); if inputs.platform is a full platform string like
+ # "linux/amd64" this select never matches — TODO confirm input format.
+ platform_digest=$(echo "$manifest" | jq -r --arg arch "$PLATFORM" '.manifests[] | select(.platform.architecture == $arch) | .digest' | head -1)
+
+ if [[ -n "$platform_digest" ]]; then
+ echo " → Inspecting $PLATFORM platform: $platform_digest"
+ if platform_manifest=$(docker manifest inspect "$REGISTRY/$ORG/$image_name@$platform_digest" 2>/dev/null); then
+ # Image size = config blob size + sum of (compressed) layer sizes.
+ config_size=$(echo "$platform_manifest" | jq -r '.config.size // 0')
+ layers_size=$(echo "$platform_manifest" | jq '[.layers[]?.size // 0] | add // 0')
+ size=$((config_size + layers_size))
+ echo " → Size: $size bytes"
+ fi
+ else
+ echo " ⚠ Platform $PLATFORM not found in manifest"
+ fi
+ else
+ # Single arch manifest
+ config_size=$(echo "$manifest" | jq -r '.config.size // 0')
+ layers_size=$(echo "$manifest" | jq '[.layers[]?.size // 0] | add // 0')
+ size=$((config_size + layers_size))
+ fi
+ fi
+
+ sizes[$service]=$size
+ total=$((total + size))
+ done < /tmp/services-list.txt
+
+ # Expose the baseline total to later steps via the step output.
+ echo "baseline-total=$total" >> $GITHUB_OUTPUT
+
+ # Build baseline-sizes.json by hand with echo; safe here because all
+ # values are bare integers or strings we control (no escaping needed).
+ echo "{" > baseline-sizes.json
+ echo " \"timestamp\": \"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"," >> baseline-sizes.json
+ echo " \"tag\": \"$TAG\"," >> baseline-sizes.json
+ echo " \"total\": $total," >> baseline-sizes.json
+ echo " \"services\": {" >> baseline-sizes.json
+
+ # first-flag pattern: emit a comma separator before every entry but the first.
+ first=true
+ for service in "${services_list[@]}"; do
+ if [[ "$first" == "true" ]]; then
+ first=false
+ else
+ echo "," >> baseline-sizes.json
+ fi
+ echo " \"$service\": ${sizes[$service]}" >> baseline-sizes.json
+ done
+
+ echo " }" >> baseline-sizes.json
+ echo "}" >> baseline-sizes.json
+
+ # Prepares a git worktree at /tmp/history-worktree checked out on the
+ # "image-size-history" branch, creating that branch as an orphan (no
+ # shared history with the code branches) on first use.
+ - name: Setup history storage
+ id: history
+ shell: bash
+ run: |
+ # Commit identity for any commits made on the history branch.
+ git config --global user.name "github-actions[bot]"
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+
+ # Create a separate worktree for history branch
+ mkdir -p /tmp/history-worktree
+
+ # Try to fetch history branch
+ if git ls-remote --heads origin image-size-history | grep -q image-size-history; then
+ git fetch origin image-size-history
+ # NOTE(review): `git worktree add` into the pre-created directory relies
+ # on it being empty at this point — confirm nothing writes there earlier.
+ git worktree add /tmp/history-worktree image-size-history
+ else
+ # Create orphan branch for history in worktree
+ git worktree add --detach /tmp/history-worktree
+ cd /tmp/history-worktree
+ git checkout --orphan image-size-history
+ # Clear the index inherited from the detached checkout; tolerate an
+ # already-empty tree.
+ git rm -rf . 2>/dev/null || true
+ mkdir -p history
+ echo "# Image Size History" > README.md
+ echo "This branch stores historical image size data for tracking" >> README.md
+ git add README.md
+ git commit -m "Initialize image size history"
+ # Push relies on the credentials already available to the checkout.
+ git push origin image-size-history
+ cd -
+ fi
+
+ # Ensure the data directory exists regardless of which branch path ran.
+ mkdir -p /tmp/history-worktree/history
+
+ # Aggregates the per-measurement JSON files from the history branch into
+ # history-data.json: one entry per calendar day (the last measurement of
+ # each day), limited to the most recent 30 days.
+ - name: Load historical data
+ shell: bash
+ run: |
+ # Load last 30 measurements and group by day (keep only last entry per day)
+ # Default to an empty list so downstream steps always have valid JSON.
+ echo "[]" > history-data.json
+ if [[ -d /tmp/history-worktree/history ]]; then
+ # jq -s slurps every measurement file into a single array;
+ # .timestamp[0:10] is the YYYY-MM-DD prefix used for daily grouping.
+ # NOTE(review): if the glob matches no files the first jq reads empty
+ # input and exits 0, leaving history-data.json empty (not "[]") — the
+ # || fallback only fires on a non-zero exit. TODO confirm/handle.
+ jq -s '.' /tmp/history-worktree/history/*.json 2>/dev/null | jq '
+ sort_by(.timestamp) |
+ group_by(.timestamp[0:10]) |
+ map(.[-1]) |
+ .[-30:]
+ ' > history-data.json || echo "[]" > history-data.json
+ fi
+
+ count=$(jq 'length' history-data.json)
+ echo "Loaded $count historical measurements (one per day)"
+
+ # Persists the current measurement onto the image-size-history branch.
+ # Only runs for pushes to develop, so PR builds never write history.
+ - name: Save current measurement to history
+ if: github.ref == 'refs/heads/develop'
+ shell: bash
+ env:
+ CI_PAT: ${{ inputs.ci-pat }}
+ run: |
+ timestamp=$(date -u +%Y%m%d-%H%M%S)
+ # ${{ github.sha }} is substituted by Actions templating before the
+ # script runs.
+ commit_sha="${{ github.sha }}"
+
+ # Add commit info to current measurement
+ jq --arg sha "$commit_sha" '. + {commit: $sha}' current-sizes.json > "/tmp/history-worktree/history/${timestamp}.json"
+
+ cd /tmp/history-worktree
+ git add "history/${timestamp}.json"
+ # NOTE(review): this commit uses the github-actions[bot] identity set in
+ # "Setup history storage"; the identity configured just below comes too
+ # late to affect it — confirm which author is intended.
+ git commit -m "Add measurement for ${timestamp} (${commit_sha:0:7})"
+ git config --global user.email "ci@rocket.chat"
+ git config --global user.name "rocketchat-ci[bot]"
+ # Rewrite github.com URLs to embed the CI PAT so the push below
+ # authenticates. NOTE(review): this stores the PAT in the global git
+ # config for the rest of the job — consider scoping/cleanup.
+ git config --global url.https://$CI_PAT@github.com/.insteadOf https://github.com/
+ git push origin image-size-history
+ cd -
+
+ echo "Saved measurement to history"
+
+ - name: Compare and generate report
+ id: compare
+ shell: bash
+ env:
+ SIZE_THRESHOLDS: ${{ inputs.size-thresholds }}
+ FAIL_THRESHOLDS: ${{ inputs.fail-thresholds }}
+ TAG: ${{ inputs.tag }}
+ BASELINE_TAG: ${{ inputs.baseline-tag }}
+ run: |
+ current_total=$(jq -r '.total' current-sizes.json)
+
+ if [[ ! -f baseline-sizes.json ]]; then
+ echo "No baseline available"
+ echo "size-diff=0" >> $GITHUB_OUTPUT
+ echo "size-diff-percent=0" >> $GITHUB_OUTPUT
+ echo "comment-triggered=false" >> $GITHUB_OUTPUT
+
+ cat > report.md << 'EOF'
+ # 📦 Docker Image Size Report
+
+ **Status:** First measurement - no baseline for comparison
+
+ **Total Size:** $(numfmt --to=iec-i --suffix=B $current_total)
+ EOF
+ exit 0
+ fi
+
+ baseline_total=$(jq -r '.total' baseline-sizes.json)
+ diff=$((current_total - baseline_total))
+
+ if [[ $baseline_total -gt 0 ]]; then
+ percent=$(awk "BEGIN {printf \"%.2f\", ($diff / $baseline_total) * 100}")
+ else
+ percent=0
+ fi
+
+ echo "size-diff=$diff" >> $GITHUB_OUTPUT
+ echo "size-diff-percent=$percent" >> $GITHUB_OUTPUT
+
+ # Only comment when size is bigger than baseline; optionally require per-image thresholds
+ THRESHOLDS="$SIZE_THRESHOLDS"
+ FAIL_THRESHOLDS="$FAIL_THRESHOLDS"
+ comment_triggered=false
+ fail_triggered=false
+ if [[ $diff -gt 0 ]]; then
+ if [[ -z "$THRESHOLDS" ]] || [[ "$THRESHOLDS" == "{}" ]]; then
+ comment_triggered=true
+ fi
+ fi
+
+ color="gray"
+ if (( $(awk "BEGIN {print ($percent > 0.01)}") )); then
+ color="red"
+ elif (( $(awk "BEGIN {print ($percent < -0.01)}") )); then
+ color="green"
+ fi
+
+ # Generate report
+ if [[ $diff -gt 0 ]]; then
+ emoji="📈"
+ badge=""
+ sign="+"
+ elif [[ $diff -lt 0 ]]; then
+ emoji="📉"
+ badge=""
+ sign="-"
+ else
+ emoji="➡️"
+ badge=""
+ sign=""
+ fi
+
+ cat > report.md << EOF
+ # 📦 Docker Image Size Report
+
+ ## $emoji Changes $badge
+
+ | Service | Current | Baseline | Change | Percent |
+ |---------|---------|----------|--------|---------|
+ | **sum of all images** | **$(numfmt --to=iec-i --suffix=B $current_total)** | **$(numfmt --to=iec-i --suffix=B $baseline_total)** | **${sign}$(numfmt --to=iec-i --suffix=B ${diff#-})** | $badge |
+ EOF
+
+ # Get services dynamically from current-sizes.json, sorted by size (largest first)
+ for service in $(jq -r '.services | to_entries | sort_by(-.value) | .[].key' current-sizes.json); do
+ current=$(jq -r ".services.\"$service\"" current-sizes.json)
+ baseline=$(jq -r ".services.\"$service\" // 0" baseline-sizes.json)
+ service_diff=$((current - baseline))
+
+ if [[ $baseline -gt 0 ]]; then
+ service_percent=$(awk "BEGIN {printf \"%.2f\", ($service_diff / $baseline) * 100}")
+ else
+ service_percent=0
+ fi
+
+ # Check per-image thresholds for comment trigger (only when size increased)
+ if [[ $diff -gt 0 ]] && [[ -n "$THRESHOLDS" ]] && [[ "$THRESHOLDS" != "{}" ]]; then
+ threshold_mb=$(echo "$THRESHOLDS" | jq -r ".\"$service\".mb // empty")
+ threshold_pct=$(echo "$THRESHOLDS" | jq -r ".\"$service\".percent // empty")
+ if [[ -n "$threshold_mb" ]] || [[ -n "$threshold_pct" ]]; then
+ exceeded=false
+ if [[ -n "$threshold_mb" ]] && [[ $service_diff -ge $(awk "BEGIN {printf \"%.0f\", $threshold_mb * 1048576}") ]]; then
+ exceeded=true
+ fi
+ if [[ -n "$threshold_pct" ]] && [[ $service_percent != "0.00" ]] && (( $(awk "BEGIN {print ($service_percent >= $threshold_pct)}") )); then
+ exceeded=true
+ fi
+ [[ "$exceeded" == "true" ]] && comment_triggered=true
+ fi
+ fi
+
+ # Check per-image fail thresholds (task fails when exceeded)
+ if [[ $diff -gt 0 ]] && [[ -n "$FAIL_THRESHOLDS" ]] && [[ "$FAIL_THRESHOLDS" != "{}" ]]; then
+ fail_mb=$(echo "$FAIL_THRESHOLDS" | jq -r ".\"$service\".mb // empty")
+ fail_pct=$(echo "$FAIL_THRESHOLDS" | jq -r ".\"$service\".percent // empty")
+ if [[ -n "$fail_mb" ]] || [[ -n "$fail_pct" ]]; then
+ exceeded=false
+ if [[ -n "$fail_mb" ]] && [[ $service_diff -ge $(awk "BEGIN {printf \"%.0f\", $fail_mb * 1048576}") ]]; then
+ exceeded=true
+ fi
+ if [[ -n "$fail_pct" ]] && [[ $service_percent != "0.00" ]] && (( $(awk "BEGIN {print ($service_percent >= $fail_pct)}") )); then
+ exceeded=true
+ fi
+ [[ "$exceeded" == "true" ]] && fail_triggered=true
+ fi
+ fi
+
+ color="gray"
+ if (( $(awk "BEGIN {print ($service_percent > 0.01)}") )); then
+ color="red"
+ elif (( $(awk "BEGIN {print ($service_percent < -0.01)}") )); then
+ color="green"
+ fi
+
+ if [[ $service_diff -gt 0 ]]; then
+ badge=""
+ sign="+"
+ elif [[ $service_diff -lt 0 ]]; then
+ badge=""
+ sign="-"
+ else
+ badge=""
+ sign=""
+ fi
+
+ echo "| $service | $(numfmt --to=iec-i --suffix=B $current) | $(numfmt --to=iec-i --suffix=B $baseline) | ${sign}$(numfmt --to=iec-i --suffix=B ${service_diff#-}) | $badge |" >> report.md
+ done
+
+ # Generate historical trend chart
+ echo "" >> report.md
+ echo "## 📊 Historical Trend" >> report.md
+ echo "" >> report.md
+
+ # Load history and generate mermaid chart
+ history_count=$(jq 'length' history-data.json)
+
+ if [[ $history_count -gt 0 ]]; then
+ # Get all services from current build
+ all_services=$(jq -r '.services | keys | .[]' current-sizes.json | sort)
+
+ # Generate Mermaid chart data
+ x_labels=""
+ declare -A service_data
+
+ # Initialize service data arrays
+ for service in $all_services; do
+ service_data[$service]=""
+ done
+
+ # Process historical data
+ while IFS= read -r line; do
+ timestamp=$(echo "$line" | jq -r '.timestamp')
+ date_label=$(date -d "$timestamp" +"%m/%d %H:%M")
+
+ if [[ -z "$x_labels" ]]; then
+ x_labels="\"$date_label\""
+ else
+ x_labels="$x_labels, \"$date_label\""
+ fi
+
+ # Add data point for each service
+ for service in $all_services; do
+ size=$(echo "$line" | jq -r ".services.\"$service\" // 0")
+ size_gb=$(awk "BEGIN {printf \"%.2f\", $size / 1073741824}")
+
+ if [[ -z "${service_data[$service]}" ]]; then
+ service_data[$service]="$size_gb"
+ else
+ service_data[$service]="${service_data[$service]}, $size_gb"
+ fi
+ done
+ done < <(jq -c '.[]' history-data.json)
+
+ # Add current PR as last point
+ current_date=$(date -u +"%m/%d %H:%M")
+ x_labels="$x_labels, \"$current_date (PR)\""
+
+ for service in $all_services; do
+ size=$(jq -r ".services.\"$service\" // 0" current-sizes.json)
+ size_gb=$(awk "BEGIN {printf \"%.2f\", $size / 1073741824}")
+ service_data[$service]="${service_data[$service]}, $size_gb"
+ done
+
+ # Generate mermaid chart with multiple lines
+ cat >> report.md << EOF
+ \`\`\`mermaid
+ ---
+ config:
+ theme: "dark"
+ xyChart:
+ width: 900
+ height: 400
+ ---
+ xychart
+ title "Image Size Evolution by Service (Last 30 Days + This PR)"
+ x-axis [$x_labels]
+ y-axis "Size (GB)" 0 --> 0.5
+ EOF
+
+ # Add a line for each service
+ for service in $all_services; do
+ echo " line \"$service\" [${service_data[$service]}]" >> report.md
+ done
+
+ cat >> report.md << 'EOF'
+ ```
+
+ EOF
+
+ # Add summary stats
+ min_size=$(jq '[.[].total] | min' history-data.json)
+ max_size=$(jq '[.[].total] | max' history-data.json)
+ avg_size=$(jq '[.[].total] | add / length' history-data.json)
+
+ cat >> report.md << EOF
+ **Statistics (last $history_count days):**
+ - 📊 Average: $(numfmt --to=iec-i --suffix=B ${avg_size%.*})
+ - ⬇️ Minimum: $(numfmt --to=iec-i --suffix=B $min_size)
+ - ⬆️ Maximum: $(numfmt --to=iec-i --suffix=B $max_size)
+ - 🎯 Current PR: $(numfmt --to=iec-i --suffix=B $current_total)
+
+ EOF
+
+ else
+ cat >> report.md << 'EOF'
+ *No historical data available yet. History tracking starts after merging to develop.*
+
+ EOF
+ fi
+
+ cat >> report.md << EOF
+
+ ℹ️ About this report
+
+ This report compares Docker image sizes from this build against the \`$BASELINE_TAG\` baseline.
+
+ - **Tag:** \`$TAG\`
+ - **Baseline:** \`$BASELINE_TAG\`
+ - **Timestamp:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")
+ - **Historical data points:** $history_count
+
+
1024,C&&(t.dump&&W2===t.dump.charCodeAt(0)?S+="?":S+="? "),S+=t.dump,C&&(S+=e_(t,e)),Td(t,e+1,E,!0,C)&&(t.dump&&W2===t.dump.charCodeAt(0)?S+=":":S+=": ",S+=t.dump,a+=S));t.tag=n,t.dump=a||"{}"}function zte(t,e,r){var s,a,n,c,f,p;for(a=r?t.explicitTypes:t.implicitTypes,n=0,c=a.length;n {var n,c;if(Object.getPrototypeOf(s).toString()==="[object Set]")if(typeof a?.coercions<"u"){if(typeof a?.coercion>"u")return mr(a,"Unbound coercion result");let f=[...s],p=[...s];if(!r(p,Object.assign(Object.assign({},a),{coercion:void 0})))return!1;let h=()=>p.some((E,C)=>E!==f[C])?new Set(p):s;return a.coercions.push([(n=a.p)!==null&&n!==void 0?n:".",X2(a.coercion,s,h)]),!0}else{let f=!0;for(let p of s)if(f=t(p,Object.assign({},a))&&f,!f&&a?.errors==null)break;return f}if(typeof a?.coercions<"u"){if(typeof a?.coercion>"u")return mr(a,"Unbound coercion result");let f={value:s};return r(s,Object.assign(Object.assign({},a),{coercion:Wf(f,"value")}))?(a.coercions.push([(c=a.p)!==null&&c!==void 0?c:".",X2(a.coercion,s,()=>new Set(f.value))]),!0):!1}return mr(a,`Expected a set (got ${ti(s)})`)}})}function MWe(t,e){let r=Tx(Rx([t,e])),s=Fx(e,{keys:t});return Wr({test:(a,n)=>{var c,f,p;if(Object.getPrototypeOf(a).toString()==="[object Map]")if(typeof n?.coercions<"u"){if(typeof n?.coercion>"u")return mr(n,"Unbound coercion result");let h=[...a],E=[...a];if(!r(E,Object.assign(Object.assign({},n),{coercion:void 0})))return!1;let C=()=>E.some((S,P)=>S[0]!==h[P][0]||S[1]!==h[P][1])?new Map(E):a;return n.coercions.push([(c=n.p)!==null&&c!==void 0?c:".",X2(n.coercion,a,C)]),!0}else{let h=!0;for(let[E,C]of a)if(h=t(E,Object.assign({},n))&&h,!h&&n?.errors==null||(h=e(C,Object.assign(Object.assign({},n),{p:s0(n,E)}))&&h,!h&&n?.errors==null))break;return h}if(typeof n?.coercions<"u"){if(typeof n?.coercion>"u")return mr(n,"Unbound coercion result");let h={value:a};return Array.isArray(a)?r(a,Object.assign(Object.assign({},n),{coercion:void 0}))?(n.coercions.push([(f=n.p)!==null&&f!==void 
0?f:".",X2(n.coercion,a,()=>new Map(h.value))]),!0):!1:s(a,Object.assign(Object.assign({},n),{coercion:Wf(h,"value")}))?(n.coercions.push([(p=n.p)!==null&&p!==void 0?p:".",X2(n.coercion,a,()=>new Map(Object.entries(h.value)))]),!0):!1}return mr(n,`Expected a map (got ${ti(a)})`)}})}function Rx(t,{delimiter:e}={}){let r=Ore(t.length);return Wr({test:(s,a)=>{var n;if(typeof s=="string"&&typeof e<"u"&&typeof a?.coercions<"u"){if(typeof a?.coercion>"u")return mr(a,"Unbound coercion result");s=s.split(e),a.coercions.push([(n=a.p)!==null&&n!==void 0?n:".",a.coercion.bind(null,s)])}if(!Array.isArray(s))return mr(a,`Expected a tuple (got ${ti(s)})`);let c=r(s,Object.assign({},a));for(let f=0,p=s.length;f {var n;if(Array.isArray(s)&&typeof a?.coercions<"u")return typeof a?.coercion>"u"?mr(a,"Unbound coercion result"):r(s,Object.assign(Object.assign({},a),{coercion:void 0}))?(s=Object.fromEntries(s),a.coercions.push([(n=a.p)!==null&&n!==void 0?n:".",a.coercion.bind(null,s)]),!0):!1;if(typeof s!="object"||s===null)return mr(a,`Expected an object (got ${ti(s)})`);let c=Object.keys(s),f=!0;for(let p=0,h=c.length;p `:`[${P}]`)}s.push(...this.arity.leading.map(c=>`<${c}>`)),this.arity.extra===Hl?s.push("..."):s.push(...this.arity.extra.map(c=>`[${c}]`)),s.push(...this.arity.trailing.map(c=>`<${c}>`))}return{usage:s.join(" "),options:a}}compile(){if(typeof this.context>"u")throw new Error("Assertion failed: No context attached");let e=Mre(),r=En.InitialNode,s=this.usage().usage,a=this.options.filter(f=>f.required).map(f=>f.nameSet);r=Ou(e,_l()),Ia(e,En.InitialNode,ei.StartOfInput,r,["setCandidateState",{candidateUsage:s,requiredOptions:a}]);let n=this.arity.proxy?"always":"isNotOptionLike",c=this.paths.length>0?this.paths:[[]];for(let f of c){let p=r;if(f.length>0){let S=Ou(e,_l());BE(e,p,S),this.registerOptions(e,S),p=S}for(let S=0;S1)r=e;else if(this.head)s=this.head.next,r=this.head.value;else throw new TypeError("Reduce of empty list with no initial value");for(var 
a=0;s!==null;a++)r=t(r,s.value,a),s=s.next;return r};Fn.prototype.reduceReverse=function(t,e){var r,s=this.tail;if(arguments.length>1)r=e;else if(this.tail)s=this.tail.prev,r=this.tail.value;else throw new TypeError("Reduce of empty list with no initial value");for(var a=this.length-1;s!==null;a--)r=t(r,s.value,a),s=s.prev;return r};Fn.prototype.toArray=function(){for(var t=new Array(this.length),e=0,r=this.head;r!==null;e++)t[e]=r.value,r=r.next;return t};Fn.prototype.toArrayReverse=function(){for(var t=new Array(this.length),e=0,r=this.tail;r!==null;e++)t[e]=r.value,r=r.prev;return t};Fn.prototype.slice=function(t,e){e=e||this.length,e<0&&(e+=this.length),t=t||0,t<0&&(t+=this.length);var r=new Fn;if(e0&&(Ae=p.slice(0,E),p=p.slice(E),C-=E),ye&&I===!0&&C>0?(ye=p.slice(0,C),se=p.slice(C)):I===!0?(ye="",se=p):ye=p,ye&&ye!==""&&ye!=="/"&&ye!==p&&Goe(ye.charCodeAt(ye.length-1))&&(ye=ye.slice(0,-1)),r.unescape===!0&&(se&&(se=_oe.removeBackslashes(se)),ye&&W===!0&&(ye=_oe.removeBackslashes(ye)));let Z={prefix:Ae,input:t,start:E,base:ye,glob:se,isBrace:S,isBracket:P,isGlob:I,isExtglob:R,isGlobstar:N,negated:ee,negatedExtglob:ie};if(r.tokens===!0&&(Z.maxDepth=0,Goe(pe)||c.push(Be),Z.tokens=c),r.parts===!0||r.tokens===!0){let De;for(let Re=0;Re("lookup"in r||(r.lookup=this.lookup),e[pI](r,s))}uninstall(e){if(Oue(e),e[pI]){if(e[iH]!==this)throw new Error("The agent is not owned by this CacheableLookup instance");e.createConnection=e[pI],delete e[pI],delete e[iH]}}updateInterfaceInfo(){let{_iface:e}=this;this._iface=Lue(),(e.has4&&!this._iface.has4||e.has6&&!this._iface.has6)&&this._cache.clear()}clear(e){if(e){this._cache.delete(e);return}this._cache.clear()}};sH.exports=HQ;sH.exports.default=HQ});var Gue=_((BMt,oH)=>{"use strict";var _et=typeof URL>"u"?Ie("url").URL:URL,Het="text/plain",jet="us-ascii",Hue=(t,e)=>e.some(r=>r instanceof RegExp?r.test(t):r===t),Get=(t,{stripHash:e})=>{let r=t.match(/^data:([^,]*?),([^#]*?)(?:#(.*))?$/);if(!r)throw new Error(`Invalid URL: 
${t}`);let s=r[1].split(";"),a=r[2],n=e?"":r[3],c=!1;s[s.length-1]==="base64"&&(s.pop(),c=!0);let f=(s.shift()||"").toLowerCase(),h=[...s.map(E=>{let[C,S=""]=E.split("=").map(P=>P.trim());return C==="charset"&&(S=S.toLowerCase(),S===jet)?"":`${C}${S?`=${S}`:""}`}).filter(Boolean)];return c&&h.push("base64"),(h.length!==0||f&&f!==Het)&&h.unshift(f),`data:${h.join(";")},${c?a.trim():a}${n?`#${n}`:""}`},jue=(t,e)=>{if(e={defaultProtocol:"http:",normalizeProtocol:!0,forceHttp:!1,forceHttps:!1,stripAuthentication:!0,stripHash:!1,stripWWW:!0,removeQueryParameters:[/^utm_\w+/i],removeTrailingSlash:!0,removeDirectoryIndex:!1,sortQueryParameters:!0,...e},Reflect.has(e,"normalizeHttps"))throw new Error("options.normalizeHttps is renamed to options.forceHttp");if(Reflect.has(e,"normalizeHttp"))throw new Error("options.normalizeHttp is renamed to options.forceHttps");if(Reflect.has(e,"stripFragment"))throw new Error("options.stripFragment is renamed to options.stripHash");if(t=t.trim(),/^data:/i.test(t))return Get(t,e);let r=t.startsWith("//");!r&&/^\.*\//.test(t)||(t=t.replace(/^(?!(?:\w+:)?\/\/)|^\/\//,e.defaultProtocol));let a=new _et(t);if(e.forceHttp&&e.forceHttps)throw new Error("The `forceHttp` and `forceHttps` options cannot be used together");if(e.forceHttp&&a.protocol==="https:"&&(a.protocol="http:"),e.forceHttps&&a.protocol==="http:"&&(a.protocol="https:"),e.stripAuthentication&&(a.username="",a.password=""),e.stripHash&&(a.hash=""),a.pathname&&(a.pathname=a.pathname.replace(/((?!:).|^)\/{2,}/g,(n,c)=>/^(?!\/)/g.test(c)?`${c}/`:"/")),a.pathname&&(a.pathname=decodeURI(a.pathname)),e.removeDirectoryIndex===!0&&(e.removeDirectoryIndex=[/^index\.[a-z]+$/]),Array.isArray(e.removeDirectoryIndex)&&e.removeDirectoryIndex.length>0){let 
n=a.pathname.split("/"),c=n[n.length-1];Hue(c,e.removeDirectoryIndex)&&(n=n.slice(0,n.length-1),a.pathname=n.slice(1).join("/")+"/")}if(a.hostname&&(a.hostname=a.hostname.replace(/\.$/,""),e.stripWWW&&/^www\.([a-z\-\d]{2,63})\.([a-z.]{2,5})$/.test(a.hostname)&&(a.hostname=a.hostname.replace(/^www\./,""))),Array.isArray(e.removeQueryParameters))for(let n of[...a.searchParams.keys()])Hue(n,e.removeQueryParameters)&&a.searchParams.delete(n);return e.sortQueryParameters&&a.searchParams.sort(),e.removeTrailingSlash&&(a.pathname=a.pathname.replace(/\/$/,"")),t=a.toString(),(e.removeTrailingSlash||a.pathname==="/")&&a.hash===""&&(t=t.replace(/\/$/,"")),r&&!e.normalizeProtocol&&(t=t.replace(/^http:\/\//,"//")),e.stripProtocol&&(t=t.replace(/^(?:https?:)?\/\//,"")),t};oH.exports=jue;oH.exports.default=jue});var Yue=_((vMt,Wue)=>{Wue.exports=que;function que(t,e){if(t&&e)return que(t)(e);if(typeof t!="function")throw new TypeError("need wrapper function");return Object.keys(t).forEach(function(s){r[s]=t[s]}),r;function r(){for(var s=new Array(arguments.length),a=0;a{"use strict";var Ist=DI(),I0e=vR(),qI=Ie("fs"),Cst=GI(),E0e=Ie("path"),aG=FI();w0e.exports=(t,e,r)=>{typeof t=="function"?(r=t,e=null,t={}):Array.isArray(t)&&(e=t,t={}),typeof e=="function"&&(r=e,e=null),e?e=Array.from(e):e=[];let s=Ist(t);if(s.sync&&typeof r=="function")throw new TypeError("callback not supported for sync tar functions");if(!s.file&&typeof r=="function")throw new TypeError("callback only supported with file option");return e.length&&Bst(s,e),s.noResume||wst(s),s.file&&s.sync?vst(s):s.file?Sst(s,r):C0e(s)};var wst=t=>{let e=t.onentry;t.onentry=e?r=>{e(r),r.resume()}:r=>r.resume()},Bst=(t,e)=>{let r=new Map(e.map(n=>[aG(n),!0])),s=t.filter,a=(n,c)=>{let f=c||E0e.parse(n).root||".",p=n===f?!1:r.has(n)?r.get(n):a(E0e.dirname(n),f);return r.set(n,p),p};t.filter=s?(n,c)=>s(n,c)&&a(aG(n)):n=>a(aG(n))},vst=t=>{let e=C0e(t),r=t.file,s=!0,a;try{let n=qI.statSync(r),c=t.maxReadSize||16*1024*1024;if(n.size