Compare commits

...

44 commits

Author SHA1 Message Date
Harrison Healey
415ee16c51
Update web app packages to 11.5.0 2026-03-26 10:30:51 -04:00
Mattermost Build
9f26163765
fix: webhook server connection error in cypress (#35471) (#35474)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
* fix: webhook server connection error in cypress

* fix shared volume with cypress

(cherry picked from commit 014fee5950)

Co-authored-by: sabril <5334504+saturninoabril@users.noreply.github.com>
2026-03-04 13:29:57 +00:00
Mattermost Build
fe45be286e
Fix E2E-only PRs and duplicate E2E test runs after PR merges (#35368) (#35473)
* use master or release base branch correctly for e2e-test-change only

* prevent duplicate run for PR already merged to master or release branch

(cherry picked from commit 4e81e504c4)

Co-authored-by: sabril <5334504+saturninoabril@users.noreply.github.com>
2026-03-04 13:14:26 +00:00
sabril
5e1cadac42
fix: MM-T388 E2E tests (#35470) 2026-03-04 19:41:55 +08:00
Alejandro García Montoro
a39e25ec87
Prepackage Playbooks FIPS v2.7.0 (#35450)
Some checks failed
Server CI / Compute Go Version (push) Has been cancelled
Web App CI / check-lint (push) Has been cancelled
Server CI / Check mocks (push) Has been cancelled
Server CI / Check go mod tidy (push) Has been cancelled
Server CI / check-style (push) Has been cancelled
Server CI / Check serialization methods for hot structs (push) Has been cancelled
Server CI / Vet API (push) Has been cancelled
Server CI / Check migration files (push) Has been cancelled
Server CI / Generate email templates (push) Has been cancelled
Server CI / Check store layers (push) Has been cancelled
Server CI / Check mmctl docs (push) Has been cancelled
Server CI / Postgres with binary parameters (push) Has been cancelled
Server CI / Postgres (push) Has been cancelled
Server CI / Postgres (FIPS) (push) Has been cancelled
Server CI / Generate Test Coverage (push) Has been cancelled
Server CI / Run mmctl tests (push) Has been cancelled
Server CI / Run mmctl tests (FIPS) (push) Has been cancelled
Server CI / Build mattermost server app (push) Has been cancelled
Web App CI / check-i18n (push) Has been cancelled
Web App CI / check-types (push) Has been cancelled
Web App CI / test (platform) (push) Has been cancelled
Web App CI / test (mattermost-redux) (push) Has been cancelled
Web App CI / test (channels shard 1/4) (push) Has been cancelled
Web App CI / test (channels shard 2/4) (push) Has been cancelled
Web App CI / test (channels shard 3/4) (push) Has been cancelled
Web App CI / test (channels shard 4/4) (push) Has been cancelled
Web App CI / upload-coverage (push) Has been cancelled
Web App CI / build (push) Has been cancelled
2026-03-03 07:57:04 +02:00
Mattermost Build
3b35529580
[MM-67640] Fix checks around autotranslations permission (#35351) (#35444)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge

(cherry picked from commit 5fdec79007)

Co-authored-by: Daniel Espino García <larkox@gmail.com>
2026-03-02 09:37:38 +00:00
Mattermost Build
77b8a956a9
MM-67659 Fix marking threads as read over the WebSocket (#35384) (#35438)
Some checks failed
Server CI / Compute Go Version (push) Has been cancelled
Web App CI / check-lint (push) Has been cancelled
Server CI / Check mocks (push) Has been cancelled
Server CI / Check go mod tidy (push) Has been cancelled
Server CI / check-style (push) Has been cancelled
Server CI / Check serialization methods for hot structs (push) Has been cancelled
Server CI / Vet API (push) Has been cancelled
Server CI / Check migration files (push) Has been cancelled
Server CI / Generate email templates (push) Has been cancelled
Server CI / Check store layers (push) Has been cancelled
Server CI / Check mmctl docs (push) Has been cancelled
Server CI / Postgres with binary parameters (push) Has been cancelled
Server CI / Postgres (push) Has been cancelled
Server CI / Postgres (FIPS) (push) Has been cancelled
Server CI / Generate Test Coverage (push) Has been cancelled
Server CI / Run mmctl tests (push) Has been cancelled
Server CI / Run mmctl tests (FIPS) (push) Has been cancelled
Server CI / Build mattermost server app (push) Has been cancelled
Web App CI / check-i18n (push) Has been cancelled
Web App CI / check-types (push) Has been cancelled
Web App CI / test (platform) (push) Has been cancelled
Web App CI / test (mattermost-redux) (push) Has been cancelled
Web App CI / test (channels shard 1/4) (push) Has been cancelled
Web App CI / test (channels shard 2/4) (push) Has been cancelled
Web App CI / test (channels shard 3/4) (push) Has been cancelled
Web App CI / test (channels shard 4/4) (push) Has been cancelled
Web App CI / upload-coverage (push) Has been cancelled
Web App CI / build (push) Has been cancelled
Automatic Merge
2026-02-27 17:39:29 +01:00
Mattermost Build
f1e1707c9e
Update plugin-calls to v1.11.1 (#35427) (#35434)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-26 23:39:28 +01:00
Mattermost Build
24bdd773d9
bumps go version to 1.24.13 (#35289) (#35429)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-25 23:09:31 +01:00
Eva Sarafianou
1177c08b29
update edwards25519 (#35425) 2026-02-25 16:13:09 -05:00
Mattermost Build
06a7d6b9dc
[MM-67235] Add support for autotranslations on GM and DM (#35255) (#35428)
Automatic Merge
2026-02-25 22:09:29 +01:00
Mattermost Build
e7c8eb43da
bumps base image version to build new mattermost-build-server images (#35281) (#35430)
Automatic Merge
2026-02-25 21:09:30 +01:00
Alejandro García Montoro
094f2c06ce
[MM-67671] Add CJK Post search support for PostgreSQL (#35260) (#35426)
Automatic Merge
2026-02-25 19:39:30 +01:00
Mattermost Build
67a5ad08ed
update default worker count for autotranslations (#35355) (#35398)
Some checks failed
Server CI / Compute Go Version (push) Has been cancelled
Web App CI / check-lint (push) Has been cancelled
Server CI / Check mocks (push) Has been cancelled
Server CI / Check go mod tidy (push) Has been cancelled
Server CI / check-style (push) Has been cancelled
Server CI / Check serialization methods for hot structs (push) Has been cancelled
Server CI / Vet API (push) Has been cancelled
Server CI / Check migration files (push) Has been cancelled
Server CI / Generate email templates (push) Has been cancelled
Server CI / Check store layers (push) Has been cancelled
Server CI / Check mmctl docs (push) Has been cancelled
Server CI / Postgres with binary parameters (push) Has been cancelled
Server CI / Postgres (push) Has been cancelled
Server CI / Postgres (FIPS) (push) Has been cancelled
Server CI / Generate Test Coverage (push) Has been cancelled
Server CI / Run mmctl tests (push) Has been cancelled
Server CI / Run mmctl tests (FIPS) (push) Has been cancelled
Server CI / Build mattermost server app (push) Has been cancelled
Web App CI / check-i18n (push) Has been cancelled
Web App CI / check-types (push) Has been cancelled
Web App CI / test (platform) (push) Has been cancelled
Web App CI / test (mattermost-redux) (push) Has been cancelled
Web App CI / test (channels shard 1/4) (push) Has been cancelled
Web App CI / test (channels shard 2/4) (push) Has been cancelled
Web App CI / test (channels shard 3/4) (push) Has been cancelled
Web App CI / test (channels shard 4/4) (push) Has been cancelled
Web App CI / upload-coverage (push) Has been cancelled
Web App CI / build (push) Has been cancelled
Automatic Merge
2026-02-23 22:09:29 +01:00
Mattermost Build
6e28371d19
[MM-67587] Exclude system messages from autotranslation queue (#35267) (#35397)
Automatic Merge
2026-02-23 21:39:27 +01:00
Mattermost Build
01d3155308
GetAllForObject, use Master instead of replica (#35356) (#35396)
Automatic Merge
2026-02-23 21:09:27 +01:00
Mattermost Build
56a549db4b
MM-67522 Add tests for syncing user statuses (#35269) (#35400)
Some checks failed
Server CI / Compute Go Version (push) Has been cancelled
Web App CI / check-lint (push) Has been cancelled
Server CI / Check mocks (push) Has been cancelled
Server CI / Check go mod tidy (push) Has been cancelled
Server CI / check-style (push) Has been cancelled
Server CI / Check serialization methods for hot structs (push) Has been cancelled
Server CI / Vet API (push) Has been cancelled
Server CI / Check migration files (push) Has been cancelled
Server CI / Generate email templates (push) Has been cancelled
Server CI / Check store layers (push) Has been cancelled
Server CI / Check mmctl docs (push) Has been cancelled
Server CI / Postgres with binary parameters (push) Has been cancelled
Server CI / Postgres (push) Has been cancelled
Server CI / Postgres (FIPS) (push) Has been cancelled
Server CI / Generate Test Coverage (push) Has been cancelled
Server CI / Run mmctl tests (push) Has been cancelled
Server CI / Run mmctl tests (FIPS) (push) Has been cancelled
Server CI / Build mattermost server app (push) Has been cancelled
Web App CI / check-i18n (push) Has been cancelled
Web App CI / check-types (push) Has been cancelled
Web App CI / test (platform) (push) Has been cancelled
Web App CI / test (mattermost-redux) (push) Has been cancelled
Web App CI / test (channels shard 1/4) (push) Has been cancelled
Web App CI / test (channels shard 2/4) (push) Has been cancelled
Web App CI / test (channels shard 3/4) (push) Has been cancelled
Web App CI / test (channels shard 4/4) (push) Has been cancelled
Web App CI / upload-coverage (push) Has been cancelled
Web App CI / build (push) Has been cancelled
* MM-67522 Add tests for syncing user statuses

* Clean up newly added tests

* Fix style

* Use SyncResponse.StatusErrors when statuses fail to sync

(cherry picked from commit 033867a344)

Co-authored-by: Harrison Healey <harrisonmhealey@gmail.com>
2026-02-20 21:50:30 +00:00
Nick Misasi
11b4428fc3
[MM-67605] Add DCR redirect URI allowlist for OAuth DCR (#35291) (#35394)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
* [MM-67605] Add DCR redirect URI allowlist enforcement

Introduce ServiceSettings.DCRRedirectURIAllowlist with glob-based validation and enforce it during OAuth dynamic client registration to block unapproved redirect URIs. Add System Console wiring and tests for config validation, wildcard matching semantics, API error behavior, and localhost wildcard support.



* Fix pre-commit checks: TypeScript type assertion, gofmt, and regenerate CI artifacts

- admin_definition_dcr_allowlist.test.tsx: Add AdminDefinitionSettingInput type assertion for 'multiple' property
- oauth_dcr_test.go: Fix comment spacing (gofmt)
- Regenerate mocks, go.sum, gen-serialized, mmctl-docs per CI requirements



* Revert unnecessary pre-commit regenerations

Revert mmctl docs, mocks, go.sum, and gen-serialized to master. Keep only
the TypeScript and gofmt fixes from the previous commit.



* Fix import order in admin_definition_dcr_allowlist.test.tsx



* Fix i18n

* Update server/public/model/oauth_dcr.go



* Fix

---------

Co-authored-by: Cursor <cursoragent@cursor.com>
Co-authored-by: Eva Sarafianou <eva.sarafianou@gmail.com>
Co-authored-by: Mattermost Build <build@mattermost.com>
2026-02-20 18:28:41 +02:00
Mattermost Build
ee4d5c34c3
[MM-67565] Prevent setting protected=true on fields without source_plugin_id (#35265) (#35377)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-20 03:39:27 +02:00
Mattermost Build
c94ee4192b
[MM-67564] Reduce channel banner height to 24px with 13px font (#35338) (#35370)
Automatic Merge
2026-02-19 09:39:28 +02:00
Mattermost Build
9b38d83f16
libre key fix (#35297) (#35357)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-19 09:09:33 +02:00
Mattermost Build
e6e3ef395b
E2E/Test: Increase parallel tests and removed smoke tests (#35271) (#35367)
* test: increase parallel tests and removed smoke tests both in cypress and playwright

* add duration and retest info

* indicate overall, first-pass and re-run run and test durations

(cherry picked from commit 0ec4a474d5)

Co-authored-by: sabril <5334504+saturninoabril@users.noreply.github.com>
2026-02-19 03:42:07 +00:00
Mattermost Build
a7a62fdc60
SEC-9513 feat: e2e tests on master and releases (#35205) (#35364)
* feat: e2e tests on master and releases

* (for pipelines testing only, will be removed after)

* remove test pipelines

(cherry picked from commit a711b22717)

Co-authored-by: sabril <5334504+saturninoabril@users.noreply.github.com>
2026-02-19 03:11:26 +00:00
Mattermost Build
2f83be03b0
[MM-67563] Change websocket format for translation update events (#35268) (#35350)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-18 18:39:27 +02:00
Mattermost Build
2fc50134eb
[MM-67530] Only show autotranslation permissions to licensed users (#35283) (#35348)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-18 15:09:27 +02:00
Mattermost Build
4efbe7ca13
[MM-67531] Add beta label to auto translations feature (#35284) (#35347)
Automatic Merge
2026-02-18 14:39:27 +02:00
Mattermost Build
d1f1eed2a1
separate websocket event for translations metrics (#35296) (#35345)
Automatic Merge
2026-02-18 12:39:27 +02:00
Mattermost Build
1ec0bcbedd
fix guest user import when guest user doesn't have any memberships (#30975) (#35344)
Automatic Merge
2026-02-18 09:39:27 +02:00
Mattermost Build
cd1fe0a98f
Rename "Self-Deleting Messages" to "Burn-on-Read Messages" (#35318) (#35331)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
* Rename "Self-Deleting Messages" to "Burn-on-Read Messages"

Updated the Burn on Read feature naming in the System Console for consistency with the product terminology and made all Posts subsection titles translatable for better internationalization support.

- Renamed section title from "Self-Deleting Messages" to "Burn-on-Read Messages"
- Updated section description to clarify that messages delete after being read (removed "or sent")
- Added translation keys for all 6 Posts subsection titles
- Updated type definitions and UI components to support translatable subsection titles



* Linting

---------


(cherry picked from commit 28406fbe23)

Co-authored-by: Maria A Nunez <maria.nunez@mattermost.com>
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-17 13:23:14 -05:00
Mattermost Build
35408e8b14
MM-67099 - Membership Sync fix (#35230) (#35322)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-17 10:09:27 +02:00
Mattermost Build
86ff1d0ad1
Mm 66813 sso callback metadata (#34955) (#35319)
Automatic Merge
2026-02-17 09:39:28 +02:00
Mattermost Build
3571847419
[MM-67488] Set autotranslation feature flag default to true (#35288) (#35317)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-16 16:39:28 +02:00
Mattermost Build
0de8353158
MM-66886 Add rate limiting to login endpoint (#34943) (#35310)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-16 12:39:27 +02:00
Mattermost Build
7247965146
[MM-67487] Fix posts since endpoint for auto translations (#35198) (#35294)
Some checks failed
Server CI / Compute Go Version (push) Has been cancelled
Web App CI / check-lint (push) Has been cancelled
Server CI / Check mocks (push) Has been cancelled
Server CI / Check go mod tidy (push) Has been cancelled
Server CI / check-style (push) Has been cancelled
Server CI / Check serialization methods for hot structs (push) Has been cancelled
Server CI / Vet API (push) Has been cancelled
Server CI / Check migration files (push) Has been cancelled
Server CI / Generate email templates (push) Has been cancelled
Server CI / Check store layers (push) Has been cancelled
Server CI / Check mmctl docs (push) Has been cancelled
Server CI / Postgres with binary parameters (push) Has been cancelled
Server CI / Postgres (push) Has been cancelled
Server CI / Postgres (FIPS) (push) Has been cancelled
Server CI / Generate Test Coverage (push) Has been cancelled
Server CI / Run mmctl tests (push) Has been cancelled
Server CI / Run mmctl tests (FIPS) (push) Has been cancelled
Server CI / Build mattermost server app (push) Has been cancelled
Web App CI / check-i18n (push) Has been cancelled
Web App CI / check-types (push) Has been cancelled
Web App CI / test (platform) (push) Has been cancelled
Web App CI / test (mattermost-redux) (push) Has been cancelled
Web App CI / test (channels shard 1/4) (push) Has been cancelled
Web App CI / test (channels shard 2/4) (push) Has been cancelled
Web App CI / test (channels shard 3/4) (push) Has been cancelled
Web App CI / test (channels shard 4/4) (push) Has been cancelled
Web App CI / upload-coverage (push) Has been cancelled
Web App CI / build (push) Has been cancelled
(cherry picked from commit 96899133c0)

Co-authored-by: Ben Cooke <benkcooke@gmail.com>
2026-02-13 16:36:35 -05:00
Mattermost Build
37cca32fc9
MM-67312: Restrict Burn-on-Read for self DMs and bot users (#35116) (#35266)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-13 09:09:26 +02:00
Mattermost Build
8305aa2b1e
Changes for BoR post soft-deletion (#35100) (#35262)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-12 13:53:29 +02:00
Mattermost Build
65022587e0
MM-66789: Include log viewer (system console) in log root path validation (#35221) (#35261)
Automatic Merge
2026-02-12 10:53:27 +02:00
Mattermost Build
19c93ff7ca
MM-67335 Fix export files having mismatched permissions (#35182) (#35244)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
* MM-67335 Fix export files having mismatched permissions

* Update test output when failing

(cherry picked from commit 6cd2df33ea)

Co-authored-by: Harrison Healey <harrisonmhealey@gmail.com>
2026-02-11 16:37:16 +00:00
Mattermost Build
6a8f3e4cdc
Autotranslation Frontend integration (#34717) (#35235)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
* AutoTranslate config settings

* comment out Agents provider

* Add auto translate timeout config validation

* i18n messages for autotranslation config validation

* fix test

* validate url for libreTranslate

* Feedback review

* Admin Console UI for Auto-Translation

* fix admin console conditional section display

* i18n

* removed unintentional change



* update admin.general.localization.autoTranslateProviderDescription newline

* fix lint

* Fix types

* UX feedback review

* fix typo in i18n

* Fix AutoTranslation feature flag

* feedback review

* Fix test default values

* feedback review

* re-add isHidden property to feature discovery

* Database Migrations, Indexes and Methods for Auto-Translation

* i18n

* fix retrylayer and storetest

* Fix search query

* fix lint

* remove the request.CTX and modify Translation model

* fix lint and external url

* Add settings to playwright

* Add empty as a valid value for the Provider

* Update jsonb queries

* Fix queries and add model methods

* fix go lint

* go lint fix 2

* fix db migrations

* feedback review + store cache

* increase migration number

* cleanup autotranslation store cache

* use NULL as objectType for posts

* fix bad merge

* fix tests

* add missing i18n

* Active WebSocket Connection User Tracking

* copilot feedback and fix styles

* remove duplicate calls

* remove early return to mitigate timing attacks

* Switch prop bags column to boolean

* fix lint

* fix tests

* Remove database search

* use Builder methods

* review feedback

* AutoTranslation interface with Core Translation Logic

* update timeouts to use short/medium/long translations

* external exports

* add configured languages to autotranslations

* added post prop for detected language

* fix bugs for storing translation and call translation service

* clean up interface

* add translations to GetPost responses and in the create post response

* use metadata for translation information and add new column for state of a translation

* change websocket event name

* change metadata to a map

* single in memory queue in the cluster leader

* remove unused definition

* Revert "remove unused definition"

This reverts commit e3e50cef30.

* remove webhub changes

* remove last webhub bit

* tidy up interface

* Frontend integration

* tidy up

* fix api response for translations

* Add Agents provider for auto translations (#34706)

* Add LLM backed autotranslation support

* Remove AU changes

* Remove orphaned tests for deleted GetActiveUserIDsForChannel

The GetActiveUserIDsForChannel function was removed from PlatformService
as part of the autotranslations refactoring, but its tests were left behind
causing linter/vet errors. This removes the orphaned test code:
- BenchmarkGetActiveUserIDsForChannel
- TestGetActiveUserIDsForChannel
- waitForActiveConnections helper

🤖 Generated with [Claude Code](https://claude.com/claude-code)



* Add missing i18n translations and fix linter errors

- Add 17 missing translation strings for autotranslation feature
- Fix shadow variable declarations in post.go and autotranslation.go
- Remove unused autoQueueMaxAge constant
- Remove unused setupWithFastIteration test function
- Use slices.Contains instead of manual loop
- Use maps.Copy instead of manual loop
- Remove empty if branch

🤖 Generated with [Claude Code](https://claude.com/claude-code)



* Fix tests

* Fixes for PR review

* add files

* Update webapp/channels/src/components/admin_console/localization/localization.scss



* fixes

* Fixes

* Didn't save

* Add a translation

* Fix translations

* Fix shadow err

---------




* tidy up code for review

* add support for editing posts

* i18n-extract

* i18n

* Rename show translations and add util to get message

* Fix get posts, migrations, websockets and configuration styles

* Fix CI

* i18n-extract

* Fix webapp tests

* Address UX feedback

* i18n-extract

* Fix lint

* updated shimmer animation, fixed issue with the width on compact icon buttons

* fix migrations

* fix markdown masking for bold, italics and strikethrough

* Address feedback

* Add missing changes

* Fix and add tests

* Fix circular dependencies

* lint

* lint

* lint and i18n

* Fix lint

* Fix i18n

* Minor changes

* Add check for whether the channel is translated or not for this user

* Fix lint and add missing change

* Fix lint

* Fix test

* Remove unneeded console log

* Fix duplicated code

* Fix small screen show translation modal

* Remove interactions on show translation modal

* Disable auto translation when the language is not supported

* Fix typo

* Fix copy text

* Fix updating autotranslation for normal users

* Fix autotranslate button showing when it shouldn't

* Fix styles

* Fix test

* Fix frontend member related changes

* Revert post improvements and remove duplicated code from bad merge

* Address feedback

* Fix test and i18n

* Fix e2e tests

* Revert lingering change from post improvements

* Fix lint

---------








(cherry picked from commit 1c7246da68)

Co-authored-by: Daniel Espino García <larkox@gmail.com>
Co-authored-by: Elias Nahum <nahumhbl@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: BenCookie95 <benkcooke@gmail.com>
Co-authored-by: Nick Misasi <nick.misasi@mattermost.com>
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Matthew Birtch <mattbirtch@gmail.com>
2026-02-11 15:49:30 +05:30
Mattermost Build
2b7e73eaef
MM-67538 Add ability for plugins to load asynchronously (#35238) (#35241)
Automatic Merge
2026-02-11 09:53:29 +02:00
Mattermost Build
3a652e19b3
MM-67137 Fix references to window in client package (#35195) (#35234)
Some checks are pending
Server CI / Compute Go Version (push) Waiting to run
Server CI / Check mocks (push) Blocked by required conditions
Server CI / Check go mod tidy (push) Blocked by required conditions
Server CI / check-style (push) Blocked by required conditions
Server CI / Check serialization methods for hot structs (push) Blocked by required conditions
Server CI / Vet API (push) Blocked by required conditions
Server CI / Check migration files (push) Blocked by required conditions
Server CI / Generate email templates (push) Blocked by required conditions
Server CI / Check store layers (push) Blocked by required conditions
Server CI / Check mmctl docs (push) Blocked by required conditions
Server CI / Postgres with binary parameters (push) Blocked by required conditions
Server CI / Postgres (push) Blocked by required conditions
Server CI / Postgres (FIPS) (push) Blocked by required conditions
Server CI / Generate Test Coverage (push) Blocked by required conditions
Server CI / Run mmctl tests (push) Blocked by required conditions
Server CI / Run mmctl tests (FIPS) (push) Blocked by required conditions
Server CI / Build mattermost server app (push) Blocked by required conditions
Web App CI / check-lint (push) Waiting to run
Web App CI / check-i18n (push) Blocked by required conditions
Web App CI / check-types (push) Blocked by required conditions
Web App CI / test (platform) (push) Blocked by required conditions
Web App CI / test (mattermost-redux) (push) Blocked by required conditions
Web App CI / test (channels shard 1/4) (push) Blocked by required conditions
Web App CI / test (channels shard 2/4) (push) Blocked by required conditions
Web App CI / test (channels shard 3/4) (push) Blocked by required conditions
Web App CI / test (channels shard 4/4) (push) Blocked by required conditions
Web App CI / upload-coverage (push) Blocked by required conditions
Web App CI / build (push) Blocked by required conditions
Automatic Merge
2026-02-10 19:53:29 +02:00
Mattermost Build
ca3c048334
Bumping prepackaged version of GitHub plugin (#35223) (#35233)
Automatic Merge
2026-02-10 16:53:29 +02:00
Mattermost Build
9aeb529f67
[MM-67114] Add mmctl license get command (#34878) (#35228)
Automatic Merge
2026-02-10 12:53:29 +02:00
Mattermost Build
d2992fec8f
[MM-67502] Sanitize secret plugin settings inside sections (#35214) (#35227)
Automatic Merge
2026-02-10 12:23:29 +02:00
245 changed files with 8938 additions and 1724 deletions

View file

@ -42,6 +42,8 @@ outputs:
description: Pass rate percentage (e.g., "100.00")
color:
description: Color for webhook based on pass rate (green=100%, yellow=99%+, orange=98%+, red=<98%)
test_duration:
description: Wall-clock test duration (earliest start to latest end across all specs, formatted as "Xm Ys")
runs:
using: node24

View file

@ -19082,6 +19082,12 @@ function getColor(passRate) {
return "#F44336";
}
}
// Render a millisecond duration as "Xm Ys" (rounded to the nearest second).
function formatDuration(ms) {
  const secs = Math.round(ms / 1e3);
  const mins = Math.floor(secs / 60);
  return `${mins}m ${secs % 60}s`;
}
function calculateResultsFromSpecs(specs) {
let passed = 0;
let failed = 0;
@ -19105,6 +19111,25 @@ function calculateResultsFromSpecs(specs) {
}
}
}
let earliestStart = null;
let latestEnd = null;
for (const spec of specs) {
const { start, end } = spec.result.stats;
if (start) {
const startMs = new Date(start).getTime();
if (earliestStart === null || startMs < earliestStart) {
earliestStart = startMs;
}
}
if (end) {
const endMs = new Date(end).getTime();
if (latestEnd === null || endMs > latestEnd) {
latestEnd = endMs;
}
}
}
const testDurationMs = earliestStart !== null && latestEnd !== null ? latestEnd - earliestStart : 0;
const testDuration = formatDuration(testDurationMs);
const totalSpecs = specs.length;
const failedSpecs = Array.from(failedSpecsSet).join(",");
const failedSpecsCount = failedSpecsSet.size;
@ -19131,8 +19156,10 @@ function calculateResultsFromSpecs(specs) {
const total = passed + failed;
const passRate = total > 0 ? (passed * 100 / total).toFixed(2) : "0.00";
const color = getColor(parseFloat(passRate));
const specSuffix = totalSpecs > 0 ? ` in ${totalSpecs} spec files` : "";
const commitStatusMessage = failed === 0 ? `${passed} passed${specSuffix}` : `${failed} failed, ${passed} passed${specSuffix}`;
const rate = total > 0 ? passed * 100 / total : 0;
const rateStr = rate === 100 ? "100%" : `${rate.toFixed(1)}%`;
const specSuffix = totalSpecs > 0 ? `, ${totalSpecs} specs` : "";
const commitStatusMessage = rate === 100 ? `${rateStr} passed (${passed})${specSuffix}` : `${rateStr} passed (${passed}/${total}), ${failed} failed${specSuffix}`;
return {
passed,
failed,
@ -19144,7 +19171,8 @@ function calculateResultsFromSpecs(specs) {
failedTests,
total,
passRate,
color
color,
testDuration
};
}
async function loadSpecFiles(resultsPath) {
@ -19290,6 +19318,7 @@ async function run() {
info(`Failed Specs Count: ${calc.failedSpecsCount}`);
info(`Commit Status Message: ${calc.commitStatusMessage}`);
info(`Failed Specs: ${calc.failedSpecs || "none"}`);
info(`Test Duration: ${calc.testDuration}`);
endGroup();
setOutput("merged", merged.toString());
setOutput("passed", calc.passed);
@ -19303,6 +19332,7 @@ async function run() {
setOutput("total", calc.total);
setOutput("pass_rate", calc.passRate);
setOutput("color", calc.color);
setOutput("test_duration", calc.testDuration);
}
// src/index.ts

View file

@ -81,6 +81,7 @@ export async function run(): Promise<void> {
core.info(`Failed Specs Count: ${calc.failedSpecsCount}`);
core.info(`Commit Status Message: ${calc.commitStatusMessage}`);
core.info(`Failed Specs: ${calc.failedSpecs || "none"}`);
core.info(`Test Duration: ${calc.testDuration}`);
core.endGroup();
// Set all outputs
@ -96,4 +97,5 @@ export async function run(): Promise<void> {
core.setOutput("total", calc.total);
core.setOutput("pass_rate", calc.passRate);
core.setOutput("color", calc.color);
core.setOutput("test_duration", calc.testDuration);
}

View file

@ -108,7 +108,7 @@ describe("calculateResultsFromSpecs", () => {
expect(calc.totalSpecs).toBe(2);
expect(calc.failedSpecs).toBe("");
expect(calc.failedSpecsCount).toBe(0);
expect(calc.commitStatusMessage).toBe("2 passed in 2 spec files");
expect(calc.commitStatusMessage).toBe("100% passed (2), 2 specs");
});
it("should calculate all outputs correctly for results with failures", () => {
@ -136,7 +136,7 @@ describe("calculateResultsFromSpecs", () => {
expect(calc.failedSpecs).toBe("tests/integration/channels.spec.ts");
expect(calc.failedSpecsCount).toBe(1);
expect(calc.commitStatusMessage).toBe(
"1 failed, 1 passed in 2 spec files",
"50.0% passed (1/2), 1 failed, 2 specs",
);
expect(calc.failedTests).toContain("should create a channel");
});
@ -230,7 +230,7 @@ describe("merge simulation", () => {
expect(finalCalc.totalSpecs).toBe(3);
expect(finalCalc.failedSpecs).toBe("");
expect(finalCalc.failedSpecsCount).toBe(0);
expect(finalCalc.commitStatusMessage).toBe("3 passed in 3 spec files");
expect(finalCalc.commitStatusMessage).toBe("100% passed (3), 3 specs");
});
it("should handle case where retest still fails", () => {

View file

@ -97,6 +97,16 @@ function getColor(passRate: number): string {
/**
* Calculate results from parsed spec files
*/
/**
 * Render a millisecond duration as "Xm Ys", rounded to the nearest second.
 */
function formatDuration(ms: number): string {
  const totalSecs = Math.round(ms / 1000);
  const mins = Math.floor(totalSecs / 60);
  const secs = totalSecs % 60;
  return `${mins}m ${secs}s`;
}
export function calculateResultsFromSpecs(
specs: ParsedSpecFile[],
): CalculationResult {
@ -125,6 +135,30 @@ export function calculateResultsFromSpecs(
}
}
// Compute test duration from earliest start to latest end across all specs
let earliestStart: number | null = null;
let latestEnd: number | null = null;
for (const spec of specs) {
const { start, end } = spec.result.stats;
if (start) {
const startMs = new Date(start).getTime();
if (earliestStart === null || startMs < earliestStart) {
earliestStart = startMs;
}
}
if (end) {
const endMs = new Date(end).getTime();
if (latestEnd === null || endMs > latestEnd) {
latestEnd = endMs;
}
}
}
const testDurationMs =
earliestStart !== null && latestEnd !== null
? latestEnd - earliestStart
: 0;
const testDuration = formatDuration(testDurationMs);
const totalSpecs = specs.length;
const failedSpecs = Array.from(failedSpecsSet).join(",");
const failedSpecsCount = failedSpecsSet.size;
@ -165,11 +199,13 @@ export function calculateResultsFromSpecs(
const color = getColor(parseFloat(passRate));
// Build commit status message
const specSuffix = totalSpecs > 0 ? ` in ${totalSpecs} spec files` : "";
const rate = total > 0 ? (passed * 100) / total : 0;
const rateStr = rate === 100 ? "100%" : `${rate.toFixed(1)}%`;
const specSuffix = totalSpecs > 0 ? `, ${totalSpecs} specs` : "";
const commitStatusMessage =
failed === 0
? `${passed} passed${specSuffix}`
: `${failed} failed, ${passed} passed${specSuffix}`;
rate === 100
? `${rateStr} passed (${passed})${specSuffix}`
: `${rateStr} passed (${passed}/${total}), ${failed} failed${specSuffix}`;
return {
passed,
@ -183,6 +219,7 @@ export function calculateResultsFromSpecs(
total,
passRate,
color,
testDuration,
};
}

View file

@ -130,6 +130,7 @@ export interface CalculationResult {
total: number;
passRate: string;
color: string;
testDuration: string;
}
export interface FailedTest {

View file

@ -45,6 +45,8 @@ outputs:
description: Number of passing tests (passed + flaky)
color:
description: Color for webhook based on pass rate (green=100%, yellow=99%+, orange=98%+, red=<98%)
test_duration:
description: Test execution duration from stats (formatted as "Xm Ys")
runs:
using: node24

View file

@ -19106,6 +19106,12 @@ function computeStats(suites, originalStats, retestStats) {
flaky
};
}
// Format milliseconds as "Xm Ys" (rounds to the nearest whole second first).
function formatDuration(ms) {
  const wholeSeconds = Math.round(ms / 1e3);
  const mins = Math.floor(wholeSeconds / 60);
  return `${mins}m ${wholeSeconds - mins * 60}s`;
}
function getColor(passRate) {
if (passRate === 100) {
return "#43A047";
@ -19173,8 +19179,11 @@ function calculateResults(results) {
const total = passing + failed;
const passRate = total > 0 ? (passing * 100 / total).toFixed(2) : "0.00";
const color = getColor(parseFloat(passRate));
const specSuffix = totalSpecs > 0 ? ` in ${totalSpecs} spec files` : "";
const commitStatusMessage = failed === 0 ? `${passed} passed${specSuffix}` : `${failed} failed, ${passed} passed${specSuffix}`;
const rate = total > 0 ? passing * 100 / total : 0;
const rateStr = rate === 100 ? "100%" : `${rate.toFixed(1)}%`;
const specSuffix = totalSpecs > 0 ? `, ${totalSpecs} specs` : "";
const commitStatusMessage = rate === 100 ? `${rateStr} passed (${passing})${specSuffix}` : `${rateStr} passed (${passing}/${total}), ${failed} failed${specSuffix}`;
const testDuration = formatDuration(stats.duration || 0);
return {
passed,
failed,
@ -19188,7 +19197,8 @@ function calculateResults(results) {
total,
passRate,
passing,
color
color,
testDuration
};
}
function mergeResults(original, retest) {
@ -19282,6 +19292,7 @@ async function run() {
info(`Failed Specs Count: ${calc.failedSpecsCount}`);
info(`Commit Status Message: ${calc.commitStatusMessage}`);
info(`Failed Specs: ${calc.failedSpecs || "none"}`);
info(`Test Duration: ${calc.testDuration}`);
endGroup();
setOutput("merged", merged.toString());
setOutput("passed", calc.passed);
@ -19297,6 +19308,7 @@ async function run() {
setOutput("pass_rate", calc.passRate);
setOutput("passing", calc.passing);
setOutput("color", calc.color);
setOutput("test_duration", calc.testDuration);
}
// src/index.ts

View file

@ -101,6 +101,7 @@ export async function run(): Promise<void> {
core.info(`Failed Specs Count: ${calc.failedSpecsCount}`);
core.info(`Commit Status Message: ${calc.commitStatusMessage}`);
core.info(`Failed Specs: ${calc.failedSpecs || "none"}`);
core.info(`Test Duration: ${calc.testDuration}`);
core.endGroup();
// Set all outputs
@ -118,4 +119,5 @@ export async function run(): Promise<void> {
core.setOutput("pass_rate", calc.passRate);
core.setOutput("passing", calc.passing);
core.setOutput("color", calc.color);
core.setOutput("test_duration", calc.testDuration);
}

View file

@ -262,7 +262,7 @@ describe("calculateResults", () => {
expect(calc.totalSpecs).toBe(2);
expect(calc.failedSpecs).toBe("");
expect(calc.failedSpecsCount).toBe(0);
expect(calc.commitStatusMessage).toBe("2 passed in 2 spec files");
expect(calc.commitStatusMessage).toBe("100% passed (2), 2 specs");
});
it("should calculate all outputs correctly for results with failures", () => {
@ -305,7 +305,7 @@ describe("calculateResults", () => {
expect(calc.failedSpecs).toBe("channels.spec.ts");
expect(calc.failedSpecsCount).toBe(1);
expect(calc.commitStatusMessage).toBe(
"1 failed, 1 passed in 2 spec files",
"50.0% passed (1/2), 1 failed, 2 specs",
);
expect(calc.failedTests).toContain("should create channel");
});
@ -445,7 +445,7 @@ describe("full integration: original with failure, retest passes", () => {
expect(finalCalc.totalSpecs).toBe(3);
expect(finalCalc.failedSpecs).toBe("");
expect(finalCalc.failedSpecsCount).toBe(0);
expect(finalCalc.commitStatusMessage).toBe("3 passed in 3 spec files");
expect(finalCalc.commitStatusMessage).toBe("100% passed (3), 3 specs");
expect(finalCalc.failedTests).toBe("");
});

View file

@ -130,6 +130,16 @@ export function computeStats(
};
}
/**
 * Format milliseconds as "Xm Ys"
 */
function formatDuration(ms: number): string {
  // Convert to whole seconds (nearest), then derive minutes and leftover seconds.
  const secs = Math.round(ms / 1000);
  return `${Math.floor(secs / 60)}m ${secs % 60}s`;
}
/**
* Get color based on pass rate
*/
@ -228,11 +238,15 @@ export function calculateResults(
const color = getColor(parseFloat(passRate));
// Build commit status message
const specSuffix = totalSpecs > 0 ? ` in ${totalSpecs} spec files` : "";
const rate = total > 0 ? (passing * 100) / total : 0;
const rateStr = rate === 100 ? "100%" : `${rate.toFixed(1)}%`;
const specSuffix = totalSpecs > 0 ? `, ${totalSpecs} specs` : "";
const commitStatusMessage =
failed === 0
? `${passed} passed${specSuffix}`
: `${failed} failed, ${passed} passed${specSuffix}`;
rate === 100
? `${rateStr} passed (${passing})${specSuffix}`
: `${rateStr} passed (${passing}/${total}), ${failed} failed${specSuffix}`;
const testDuration = formatDuration(stats.duration || 0);
return {
passed,
@ -248,6 +262,7 @@ export function calculateResults(
passRate,
passing,
color,
testDuration,
};
}

View file

@ -80,6 +80,7 @@ export interface CalculationResult {
passRate: string;
passing: number;
color: string;
testDuration: string;
}
export interface FailedTest {

View file

@ -18,7 +18,7 @@ outputs:
description: Whether the PR contains only E2E test changes (true/false)
value: ${{ steps.check.outputs.e2e_test_only }}
image_tag:
description: Docker image tag to use (master for E2E-only, short SHA for mixed)
description: Docker image tag to use (base branch ref for E2E-only, short SHA for mixed)
value: ${{ steps.check.outputs.image_tag }}
runs:
@ -33,7 +33,8 @@ runs:
INPUT_HEAD_SHA: ${{ inputs.head_sha }}
INPUT_PR_NUMBER: ${{ inputs.pr_number }}
run: |
# Resolve SHAs from PR number if not provided
# Resolve SHAs and base branch from PR number if not provided
BASE_REF=""
if [ -z "$INPUT_BASE_SHA" ] || [ -z "$INPUT_HEAD_SHA" ]; then
if [ -z "$INPUT_PR_NUMBER" ]; then
echo "::error::Either base_sha/head_sha or pr_number must be provided"
@ -44,14 +45,24 @@ runs:
PR_DATA=$(gh api "repos/${{ github.repository }}/pulls/${INPUT_PR_NUMBER}")
INPUT_BASE_SHA=$(echo "$PR_DATA" | jq -r '.base.sha')
INPUT_HEAD_SHA=$(echo "$PR_DATA" | jq -r '.head.sha')
BASE_REF=$(echo "$PR_DATA" | jq -r '.base.ref')
if [ -z "$INPUT_BASE_SHA" ] || [ "$INPUT_BASE_SHA" = "null" ] || \
[ -z "$INPUT_HEAD_SHA" ] || [ "$INPUT_HEAD_SHA" = "null" ]; then
echo "::error::Could not resolve SHAs for PR #${INPUT_PR_NUMBER}"
exit 1
fi
elif [ -n "$INPUT_PR_NUMBER" ]; then
# SHAs provided but we still need the base branch ref
BASE_REF=$(gh api "repos/${{ github.repository }}/pulls/${INPUT_PR_NUMBER}" --jq '.base.ref')
fi
# Default to master if base ref could not be determined
if [ -z "$BASE_REF" ] || [ "$BASE_REF" = "null" ]; then
BASE_REF="master"
fi
echo "PR base branch: ${BASE_REF}"
SHORT_SHA="${INPUT_HEAD_SHA::7}"
# Get changed files - try git first, fall back to API
@ -73,7 +84,8 @@ runs:
while IFS= read -r file; do
[ -z "$file" ] && continue
if [[ ! "$file" =~ ^e2e-tests/ ]] && \
[[ ! "$file" =~ ^\.github/workflows/e2e- ]]; then
[[ ! "$file" =~ ^\.github/workflows/e2e- ]] && \
[[ ! "$file" =~ ^\.github/actions/ ]]; then
echo "Non-E2E file found: $file"
E2E_TEST_ONLY="false"
break
@ -84,8 +96,9 @@ runs:
# Set outputs
echo "e2e_test_only=${E2E_TEST_ONLY}" >> $GITHUB_OUTPUT
if [ "$E2E_TEST_ONLY" = "true" ]; then
echo "image_tag=master" >> $GITHUB_OUTPUT
if [ "$E2E_TEST_ONLY" = "true" ] && \
{ [ "$BASE_REF" = "master" ] || [[ "$BASE_REF" =~ ^release-[0-9]+\.[0-9]+$ ]]; }; then
echo "image_tag=${BASE_REF}" >> $GITHUB_OUTPUT
else
echo "image_tag=${SHORT_SHA}" >> $GITHUB_OUTPUT
fi

View file

@ -1,320 +0,0 @@
# E2E Test Workflow For PR
This document describes the E2E test workflow for Pull Requests in Mattermost.
## Overview
This is an **automated workflow** that runs smoke-then-full E2E tests automatically for every PR commit. Smoke tests run first as a gate—if they fail, full tests are skipped to save CI resources and provide fast feedback.
Both Cypress and Playwright test suites run **in parallel** with independent status checks.
**Note**: This workflow is designed for **Pull Requests only**. It will fail if the commit SHA is not associated with an open PR.
### On-Demand Testing
For on-demand E2E testing, the existing triggers still work:
- **Comment triggers**: `/e2e-test`, `/e2e-test fips`, or with `MM_ENV` parameters
- **Label trigger**: `E2E/Run`
These manual triggers are separate from this automated workflow and can be used for custom test configurations or re-runs.
## Workflow Files
```
.github/workflows/
├── e2e-tests-ci.yml # Main orchestrator (resolves PR, triggers both)
├── e2e-tests-cypress.yml # Cypress: smoke → full
└── e2e-tests-playwright.yml # Playwright: smoke → full
```
## Architecture Diagram
```
┌─────────────────────────────────────────────────────────────────────────────────┐
│ MAIN ORCHESTRATOR: e2e-tests-ci.yml │
└─────────────────────────────────────────────────────────────────────────────────┘
┌─────────────────────┐
│ workflow_dispatch │
│ (commit_sha) │
└──────────┬──────────┘
┌──────────▼──────────┐
│ resolve-pr │
│ (GitHub API call) │
│ │
│ Fails if no PR │
│ found for commit │
└──────────┬──────────┘
┌──────────────────┴──────────────────┐
│ (parallel) │
▼ ▼
┌─────────────────────────────────┐ ┌─────────────────────────────────┐
│ e2e-tests-cypress.yml │ │ e2e-tests-playwright.yml │
│ (reusable workflow) │ │ (reusable workflow) │
│ │ │ │
│ Inputs: │ │ Inputs: │
│ • commit_sha │ │ • commit_sha │
│ • workers_number: "20" │ │ • workers_number: "1" (default)│
│ • server: "onprem" │ │ • server: "onprem" │
│ • enable_reporting: true │ │ • enable_reporting: true │
│ • report_type: "PR" │ │ • report_type: "PR" │
│ • pr_number │ │ • pr_number (required for full)│
└─────────────────────────────────┘ └─────────────────────────────────┘
```
## Per-Framework Workflow Flow
Each framework (Cypress/Playwright) follows the same pattern:
```
┌──────────────────────────────────────────────────────────────────┐
│ PREFLIGHT CHECKS │
└──────────────────────────────────────────────────────────────────┘
┌─────────────────────────┼─────────────────────────┐
│ │ │
▼ ▼ ▼
┌────────────┐ ┌─────────────┐ ┌─────────────┐
│ lint/tsc │ │ shell-check │ │ update- │
│ check │ │ │ │ status │
└─────┬──────┘ └──────┬──────┘ │ (pending) │
│ │ └──────┬──────┘
└──────────────────────┴────────────────────────┘
┌──────────────────────────────────────────────────────────────────┐
│ GENERATE BUILD VARIABLES │
│ (branch, build_id, server_image) │
│ │
│ Server image generated from commit SHA: │
│ mattermostdevelopment/mattermost-enterprise-edition:<sha7>
└─────────────────────────────┬────────────────────────────────────┘
┌──────────────────────────────────────────────────────────────────┐
│ SMOKE TESTS │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ generate-test-cycle (smoke) [Cypress only] │ │
│ └─────────────────────────┬──────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ smoke-test │ │
│ │ • Cypress: TEST_FILTER: --stage=@prod --group=@smoke │ │
│ │ • Playwright: TEST_FILTER: --grep @smoke │ │
│ │ • Fail fast if any smoke test fails │ │
│ └─────────────────────────┬──────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ smoke-report │ │
│ │ • Assert 0 failures │ │
│ │ • Upload results to S3 (Playwright) │ │
│ │ • Update commit status │ │
│ └────────────────────────────────────────────────────────────┘ │
└─────────────────────────────┬────────────────────────────────────┘
│ (only if smoke passes)
│ (Playwright: also requires pr_number)
┌──────────────────────────────────────────────────────────────────┐
│ FULL TESTS │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ generate-test-cycle (full) [Cypress only] │ │
│ └─────────────────────────┬──────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ full-test (matrix: workers) │ │
│ │ • Cypress: TEST_FILTER: --stage='@prod' │ │
│ │ --exclude-group='@smoke' │ │
│ │ • Playwright: TEST_FILTER: --grep-invert "@smoke|@visual" │ │
│ │ • Multiple workers for parallelism │ │
│ └─────────────────────────┬──────────────────────────────────┘ │
│ │ │
│ ▼ │
│ ┌────────────────────────────────────────────────────────────┐ │
│ │ full-report │ │
│ │ • Aggregate results from all workers │ │
│ │ • Upload results to S3 (Playwright) │ │
│ │ • Publish report (if reporting enabled) │ │
│ │ • Update final commit status │ │
│ └────────────────────────────────────────────────────────────┘ │
└──────────────────────────────────────────────────────────────────┘
```
## Commit Status Checks
Each workflow phase creates its own GitHub commit status check:
```
GitHub Commit Status Checks:
═══════════════════════════
┌─────────────────────────────────────────────────────────────────────────────┐
│ E2E Tests/cypress-smoke ●────────●────────● │
│ pending running ✓ passed / ✗ failed │
│ │
│ E2E Tests/cypress-full ○ ○ ●────────●────────● │
│ (skip) (skip) pending running ✓/✗ │
│ │ │
│ └── Only runs if smoke passes │
└─────────────────────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────────────────────┐
│ E2E Tests/playwright-smoke ●────────●────────● │
│ pending running ✓ passed / ✗ failed │
│ │
│ E2E Tests/playwright-full ○ ○ ●────────●────────● │
│ (skip) (skip) pending running ✓/✗ │
│ │ │
│ └── Only runs if smoke passes │
│ AND pr_number is provided │
└─────────────────────────────────────────────────────────────────────────────┘
```
## Timeline
```
Timeline:
─────────────────────────────────────────────────────────────────────────────►
T0 T1 T2 T3 T4
│ │ │ │ │
│ Start │ Preflight │ Smoke Tests │ Full Tests │ Done
│ resolve │ Checks │ (both parallel) │ (both parallel) │
│ PR │ │ │ (if smoke pass) │
```
## Test Filtering
| Framework | Smoke Tests | Full Tests |
|-----------|-------------|------------|
| **Cypress** | `--stage=@prod --group=@smoke` | See below |
| **Playwright** | `--grep @smoke` | `--grep-invert "@smoke\|@visual"` |
### Cypress Full Test Filter
```
--stage="@prod"
--excludeGroup="@smoke,@te_only,@cloud_only,@high_availability"
--sortFirst="@compliance_export,@elasticsearch,@ldap_group,@ldap"
--sortLast="@saml,@keycloak,@plugin,@plugins_uninstall,@mfa,@license_removal"
```
- **excludeGroup**: Skips smoke tests (already run), TE-only, cloud-only, and HA tests
- **sortFirst**: Runs long-running test groups early for better parallelization
- **sortLast**: Runs tests that may affect system state at the end
## Tagging Smoke Tests
### Cypress
Add `@smoke` to the Group comment at the top of spec files:
```javascript
// Stage: @prod
// Group: @channels @messaging @smoke
```
### Playwright
Add `@smoke` to the test tag option:
```typescript
test('critical login flow', {tag: ['@smoke', '@login']}, async ({pw}) => {
// ...
});
```
## Worker Configuration
| Framework | Smoke Workers | Full Workers |
|-----------|---------------|--------------|
| **Cypress** | 1 | 20 |
| **Playwright** | 1 | 1 (uses internal parallelism via `PW_WORKERS`) |
## Docker Services
Different test phases enable different Docker services based on test requirements:
| Test Phase | Docker Services |
|------------|-----------------|
| Smoke Tests | `postgres inbucket` |
| Full Tests | `postgres inbucket minio openldap elasticsearch keycloak` |
Full tests enable additional services to support tests requiring LDAP, Elasticsearch, S3-compatible storage (Minio), and SAML/OAuth (Keycloak).
## Failure Behavior
1. **Smoke test fails**: Full tests are skipped, only smoke commit status shows failure (no full test status created)
2. **Full test fails**: Full commit status shows failure with details
3. **Both pass**: Both smoke and full commit statuses show success
4. **No PR found**: Workflow fails immediately with error message
**Note**: Full test status updates use explicit job result checks (`needs.full-report.result == 'success'` / `'failure'`) rather than global `success()` / `failure()` functions. This ensures full test status is only updated when full tests actually run, not when smoke tests fail upstream.
## Manual Trigger
The workflow can be triggered manually via `workflow_dispatch` for PR commits:
```bash
# Run E2E tests for a PR commit
gh workflow run e2e-tests-ci.yml -f commit_sha=<PR_COMMIT_SHA>
```
**Note**: The commit SHA must be associated with an open PR. The workflow will fail otherwise.
## Automated Trigger (Argo Events)
The workflow is automatically triggered by Argo Events when the `Enterprise CI/docker-image` status check succeeds on a commit.
### Fork PR Handling
For PRs from forked repositories:
- `body.branches` may be empty (commit doesn't exist in base repo branches)
- Falls back to `master` branch for workflow files (trusted code)
- The `commit_sha` still points to the fork's commit for testing
- PR number is resolved via GitHub API (works for fork PRs)
### Flow
```
Enterprise CI/docker-image succeeds
Argo Events Sensor
workflow_dispatch
(ref, commit_sha)
e2e-tests-ci.yml
resolve-pr (GitHub API)
Cypress + Playwright (parallel)
```
## S3 Report Storage
Playwright test results are uploaded to S3:
| Test Phase | S3 Path |
|------------|---------|
| Smoke (with PR) | `server-pr-{PR_NUMBER}/e2e-reports/playwright-smoke/{RUN_ID}/` |
| Smoke (no PR) | `server-commit-{SHA7}/e2e-reports/playwright-smoke/{RUN_ID}/` |
| Full | `server-pr-{PR_NUMBER}/e2e-reports/playwright-full/{RUN_ID}/` |
**Note**: Full tests require a PR number, so there's no commit-based fallback for full test reports.
## Related Files
- `e2e-tests/cypress/` - Cypress test suite
- `e2e-tests/playwright/` - Playwright test suite
- `e2e-tests/.ci/` - CI configuration and environment files
- `e2e-tests/Makefile` - Main Makefile with targets for running tests, generating cycles, and reporting

352
.github/e2e-tests-workflows.md vendored Normal file
View file

@ -0,0 +1,352 @@
# E2E Test Pipelines
Three automated E2E test pipelines cover different stages of the development lifecycle.
## Pipelines
| Pipeline | Trigger | Editions Tested | Image Source |
|----------|---------|----------------|--------------|
| **PR** (`e2e-tests-ci.yml`) | Argo Events on `Enterprise CI/docker-image` status | enterprise | `mattermostdevelopment/**` |
| **Merge to master/release** (`e2e-tests-on-merge.yml`) | Platform delivery after docker build (`delivery-platform/.github/workflows/mattermost-platform-delivery.yaml`) | enterprise, fips | `mattermostdevelopment/**` |
| **Release cut** (`e2e-tests-on-release.yml`) | Platform release after docker build (`delivery-platform/.github/workflows/release-mattermost-platform.yml`) | enterprise, fips, team (future) | `mattermost/**` |
The PR pipeline follows the **smoke-then-full** pattern: smoke tests run first, and full tests only run if smoke passes. The merge and release pipelines skip smoke tests and run full tests only.
## Workflow Files
```
.github/workflows/
├── e2e-tests-ci.yml # PR orchestrator
├── e2e-tests-on-merge.yml # Merge orchestrator (master/release branches)
├── e2e-tests-on-release.yml # Release cut orchestrator
├── e2e-tests-cypress.yml # Shared wrapper: cypress smoke -> full
├── e2e-tests-playwright.yml # Shared wrapper: playwright smoke -> full
├── e2e-tests-cypress-template.yml # Template: actual cypress test execution
└── e2e-tests-playwright-template.yml # Template: actual playwright test execution
```
### Call hierarchy
```
e2e-tests-ci.yml ─────────────────┐
e2e-tests-on-merge.yml ───────────┤──► e2e-tests-cypress.yml ──► e2e-tests-cypress-template.yml
e2e-tests-on-release.yml ─────────┘ e2e-tests-playwright.yml ──► e2e-tests-playwright-template.yml
```
---
## Pipeline 1: PR (`e2e-tests-ci.yml`)
Runs E2E tests for every PR commit after the enterprise docker image is built. Fails if the commit is not associated with an open PR.
**Trigger chain:**
```
PR commit ─► Enterprise CI builds docker image
─► Argo Events detects "Enterprise CI/docker-image" status
─► dispatches e2e-tests-ci.yml
```
For PRs from forks, `body.branches` may be empty so the workflow falls back to `master` for workflow files (trusted code), while `commit_sha` still points to the fork's commit.
**Jobs:** 2 (cypress + playwright), each does smoke -> full
**Commit statuses (4 total):**
| Context | Description (pending) | Description (result) |
|---------|----------------------|---------------------|
| `e2e-test/cypress-smoke\|enterprise` | `tests running, image_tag:abc1234` | `100% passed (1313), 440 specs, image_tag:abc1234` |
| `e2e-test/cypress-full\|enterprise` | `tests running, image_tag:abc1234` | `100% passed (1313), 440 specs, image_tag:abc1234` |
| `e2e-test/playwright-smoke\|enterprise` | `tests running, image_tag:abc1234` | `100% passed (200), 50 specs, image_tag:abc1234` |
| `e2e-test/playwright-full\|enterprise` | `tests running, image_tag:abc1234` | `99.5% passed (199/200), 1 failed, 50 specs, image_tag:abc1234` |
**Manual trigger (CLI):**
```bash
gh workflow run e2e-tests-ci.yml \
--repo mattermost/mattermost \
--field pr_number="35171"
```
**Manual trigger (GitHub UI):**
1. Go to **Actions** > **E2E Tests (smoke-then-full)**
2. Click **Run workflow**
3. Fill in `pr_number` (e.g., `35171`)
4. Click **Run workflow**
### On-demand testing
For on-demand E2E testing, the existing triggers still work:
- **Comment triggers**: `/e2e-test`, `/e2e-test fips`, or with `MM_ENV` parameters
- **Label trigger**: `E2E/Run`
These are separate from the automated workflow and can be used for custom test configurations or re-runs.
---
## Pipeline 2: Merge (`e2e-tests-on-merge.yml`)
Runs E2E tests after every push/merge to `master` or `release-*` branches.
**Trigger chain:**
```
Push to master/release-*
─► Argo Events (mattermost-platform-package sensor)
─► delivery-platform/.github/workflows/mattermost-platform-delivery.yaml
─► builds docker images (enterprise + fips)
─► trigger-e2e-tests job dispatches e2e-tests-on-merge.yml
```
**Jobs:** 4 (cypress + playwright) x (enterprise + fips), smoke skipped, full tests only
**Commit statuses (4 total):**
| Context | Description example |
|---------|-------------------|
| `e2e-test/cypress-full\|enterprise` | `100% passed (1313), 440 specs, image_tag:abc1234_def5678` |
| `e2e-test/cypress-full\|fips` | `100% passed (1313), 440 specs, image_tag:abc1234_def5678` |
| `e2e-test/playwright-full\|enterprise` | `100% passed (200), 50 specs, image_tag:abc1234_def5678` |
| `e2e-test/playwright-full\|fips` | `100% passed (200), 50 specs, image_tag:abc1234_def5678` |
**Manual trigger (CLI):**
```bash
# For master
gh workflow run e2e-tests-on-merge.yml \
--repo mattermost/mattermost \
--field branch="master" \
--field commit_sha="<full_commit_sha>" \
--field server_image_tag="<image_tag>"
# For release branch
gh workflow run e2e-tests-on-merge.yml \
--repo mattermost/mattermost \
--field branch="release-11.4" \
--field commit_sha="<full_commit_sha>" \
--field server_image_tag="<image_tag>"
```
**Manual trigger (GitHub UI):**
1. Go to **Actions** > **E2E Tests (master/release - merge)**
2. Click **Run workflow**
3. Fill in:
- `branch`: `master` or `release-11.4`
- `commit_sha`: full 40-char SHA
- `server_image_tag`: e.g., `abc1234_def5678`
4. Click **Run workflow**
---
## Pipeline 3: Release Cut (`e2e-tests-on-release.yml`)
Runs E2E tests after a release cut against the published release images.
**Trigger chain:**
```
Manual release cut
─► delivery-platform/.github/workflows/release-mattermost-platform.yml
─► builds and publishes release docker images
─► trigger-e2e-tests job dispatches e2e-tests-on-release.yml
```
**Jobs:** 4 (cypress + playwright) x (enterprise + fips), smoke skipped, full tests only. Team edition planned for future.
**Commit statuses (4 total, 6 when team is enabled):**
Descriptions include alias tags showing which rolling docker tags point to the same image.
RC example (11.4.0-rc3):
| Context | Description example |
|---------|-------------------|
| `e2e-test/cypress-full\|enterprise` | `100% passed (1313), 440 specs, image_tag:11.4.0-rc3 (release-11.4, release-11)` |
| `e2e-test/cypress-full\|fips` | `100% passed (1313), 440 specs, image_tag:11.4.0-rc3 (release-11.4, release-11)` |
| `e2e-test/cypress-full\|team` (future) | `100% passed (1313), 440 specs, image_tag:11.4.0-rc3 (release-11.4, release-11)` |
Stable example (11.4.0) — includes `MAJOR.MINOR` alias:
| Context | Description example |
|---------|-------------------|
| `e2e-test/cypress-full\|enterprise` | `100% passed (1313), 440 specs, image_tag:11.4.0 (release-11.4, release-11, 11.4)` |
| `e2e-test/cypress-full\|fips` | `100% passed (1313), 440 specs, image_tag:11.4.0 (release-11.4, release-11, 11.4)` |
| `e2e-test/cypress-full\|team` (future) | `100% passed (1313), 440 specs, image_tag:11.4.0 (release-11.4, release-11, 11.4)` |
**Manual trigger (CLI):**
```bash
gh workflow run e2e-tests-on-release.yml \
--repo mattermost/mattermost \
--field branch="release-11.4" \
--field commit_sha="<full_commit_sha>" \
--field server_image_tag="11.4.0" \
--field server_image_aliases="release-11.4, release-11, 11.4"
```
**Manual trigger (GitHub UI):**
1. Go to **Actions** > **E2E Tests (release cut)**
2. Click **Run workflow**
3. Fill in:
- `branch`: `release-11.4`
- `commit_sha`: full 40-char SHA
- `server_image_tag`: e.g., `11.4.0` or `11.4.0-rc3`
- `server_image_aliases`: e.g., `release-11.4, release-11, 11.4` (optional)
4. Click **Run workflow**
---
## Commit Status Format
**Context name:** `e2e-test/<phase>|<edition>`
Where `<phase>` is `cypress-smoke`, `cypress-full`, `playwright-smoke`, or `playwright-full`.
**Description format:**
- All passed: `100% passed (<count>), <specs> specs, image_tag:<tag>[ (<aliases>)]`
- With failures: `<rate>% passed (<passed>/<total>), <failed> failed, <specs> specs, image_tag:<tag>[ (<aliases>)]`
- Pending: `tests running, image_tag:<tag>[ (<aliases>)]`
- Pass rate: `100%` if all pass, otherwise one decimal (e.g., `99.5%`)
- Aliases only present for release cuts
### Failure behavior
1. **Smoke test fails**: Full tests are skipped, only smoke commit status shows failure
2. **Full test fails**: Full commit status shows failure with pass rate
3. **Both pass**: Both smoke and full commit statuses show success
4. **No PR found** (PR pipeline only): Workflow fails immediately
---
## Smoke-then-Full Pattern
Each wrapper (Cypress/Playwright) follows this flow:
```
generate-build-variables (branch, build_id, server_image)
─► smoke tests (1 worker, minimal docker services)
─► if smoke passes ─► full tests (20 workers cypress / 1 worker playwright, all docker services)
─► report (aggregate results, update commit status)
```
### Test filtering
| Framework | Smoke | Full |
|-----------|-------|------|
| **Cypress** | `--stage=@prod --group=@smoke` | `--stage="@prod" --excludeGroup="@te_only,@cloud_only,@high_availability" --sortFirst=... --sortLast=...` |
| **Playwright** | `--grep @smoke` | `--grep-invert "@smoke\|@visual"` |
### Worker configuration
| Framework | Smoke Workers | Full Workers |
|-----------|---------------|--------------|
| **Cypress** | 1 | 20 |
| **Playwright** | 1 | 1 (uses internal parallelism via `PW_WORKERS`) |
### Docker services
| Test Phase | Docker Services |
|------------|-----------------|
| Smoke | `postgres inbucket` |
| Full | `postgres inbucket minio openldap elasticsearch keycloak` |
---
## Tagging Smoke Tests
### Cypress
Add `@smoke` to the Group comment at the top of spec files:
```javascript
// Stage: @prod
// Group: @channels @messaging @smoke
```
### Playwright
Add `@smoke` to the test tag option:
```typescript
test('critical login flow', {tag: ['@smoke', '@login']}, async ({pw}) => {
// ...
});
```
---
## Shared Wrapper Inputs
The wrappers (`e2e-tests-cypress.yml`, `e2e-tests-playwright.yml`) accept these inputs:
| Input | Default | Description |
|-------|---------|-------------|
| `server_edition` | `enterprise` | Edition: `enterprise`, `fips`, or `team` |
| `server_image_repo` | `mattermostdevelopment` | Docker namespace: `mattermostdevelopment` or `mattermost` |
| `server_image_tag` | derived from `commit_sha` | Docker image tag |
| `server_image_aliases` | _(empty)_ | Alias tags shown in commit status description |
| `ref_branch` | _(empty)_ | Source branch name for webhook messages (e.g., `master` or `release-11.4`) |
The automation dashboard branch name is derived from context:
- PR: `server-pr-<pr_number>` (e.g., `server-pr-35205`)
- Master merge: `server-master-<image_tag>` (e.g., `server-master-abc1234_def5678`)
- Release merge: `server-release-<version>-<image_tag>` (e.g., `server-release-11.4-abc1234_def5678`)
- Fallback: `server-commit-<image_tag>`
The test type suffix (`-smoke` or `-full`) is appended by the template.
The server image is derived as:
```
{server_image_repo}/{edition_image_name}:{server_image_tag}
```
Where `edition_image_name` maps to:
- `enterprise` -> `mattermost-enterprise-edition`
- `fips` -> `mattermost-enterprise-fips-edition`
- `team` -> `mattermost-team-edition`
---
## Webhook Message Format
After full tests complete, a webhook notification is sent to the configured `REPORT_WEBHOOK_URL`. The results line uses the same `commit_status_message` as the GitHub commit status. The source line varies by pipeline using `report_type` and `ref_branch`.
**Report types:** `PR`, `MASTER`, `RELEASE`, `RELEASE_CUT`
### PR
```
:open-pull-request: mattermost-pr-35205
:docker: mattermostdevelopment/mattermost-enterprise-edition:abc1234
100% passed (1313), 440 specs | full report
```
### Merge to master
```
:git_merge: abc1234 on master
:docker: mattermostdevelopment/mattermost-enterprise-edition:abc1234_def5678
100% passed (1313), 440 specs | full report
```
### Merge to release branch
```
:git_merge: abc1234 on release-11.4
:docker: mattermostdevelopment/mattermost-enterprise-edition:abc1234_def5678
100% passed (1313), 440 specs | full report
```
### Release cut
```
:github_round: abc1234 on release-11.4
:docker: mattermost/mattermost-enterprise-edition:11.4.0-rc3
100% passed (1313), 440 specs | full report
```
The commit short SHA links to the commit on GitHub. The PR number links to the pull request.
---
## Related Files
- `e2e-tests/cypress/` - Cypress test suite
- `e2e-tests/playwright/` - Playwright test suite
- `e2e-tests/.ci/` - CI configuration and environment files
- `e2e-tests/Makefile` - Makefile with targets for running tests, generating cycles, and reporting

View file

@ -1,5 +1,5 @@
---
name: E2E Tests (smoke-then-full)
name: E2E Tests (pull request)
on:
# Argo Events Trigger (automated):
# - Triggered by: Enterprise CI/docker-image status check (success)
@ -72,16 +72,31 @@ jobs:
# Argo Events trigger: commit SHA provided, resolve PR number
if [ -n "$INPUT_COMMIT_SHA" ]; then
echo "Automated trigger: resolving PR number from commit ${INPUT_COMMIT_SHA}"
PR_NUMBER=$(gh api "repos/${{ github.repository }}/commits/${INPUT_COMMIT_SHA}/pulls" \
--jq '.[0].number // empty' 2>/dev/null || echo "")
if [ -n "$PR_NUMBER" ]; then
echo "Found PR #${PR_NUMBER} for commit ${INPUT_COMMIT_SHA}"
echo "PR_NUMBER=${PR_NUMBER}" >> $GITHUB_OUTPUT
echo "COMMIT_SHA=${INPUT_COMMIT_SHA}" >> $GITHUB_OUTPUT
else
PR_DATA=$(gh api "repos/${{ github.repository }}/commits/${INPUT_COMMIT_SHA}/pulls" \
--jq '.[0] // empty' 2>/dev/null || echo "")
PR_NUMBER=$(echo "$PR_DATA" | jq -r '.number // empty' 2>/dev/null || echo "")
if [ -z "$PR_NUMBER" ]; then
echo "::error::No PR found for commit ${INPUT_COMMIT_SHA}. This workflow is for PRs only."
exit 1
fi
echo "Found PR #${PR_NUMBER} for commit ${INPUT_COMMIT_SHA}"
# Skip if PR is already merged to master or a release branch.
# The e2e-tests-on-merge workflow handles post-merge E2E tests.
PR_MERGED=$(echo "$PR_DATA" | jq -r '.merged_at // empty' 2>/dev/null || echo "")
PR_BASE_REF=$(echo "$PR_DATA" | jq -r '.base.ref // empty' 2>/dev/null || echo "")
if [ -n "$PR_MERGED" ]; then
if [ "$PR_BASE_REF" = "master" ] || [[ "$PR_BASE_REF" =~ ^release-[0-9]+\.[0-9]+$ ]]; then
echo "PR #${PR_NUMBER} is already merged to ${PR_BASE_REF}. Skipping - handled by e2e-tests-on-merge workflow."
echo "PR_NUMBER=" >> $GITHUB_OUTPUT
echo "COMMIT_SHA=" >> $GITHUB_OUTPUT
exit 0
fi
fi
echo "PR_NUMBER=${PR_NUMBER}" >> $GITHUB_OUTPUT
echo "COMMIT_SHA=${INPUT_COMMIT_SHA}" >> $GITHUB_OUTPUT
exit 0
fi
@ -90,6 +105,7 @@ jobs:
exit 1
- name: ci/check-e2e-test-only
if: steps.resolve.outputs.PR_NUMBER != ''
id: e2e-check
uses: ./.github/actions/check-e2e-test-only
with:
@ -98,6 +114,7 @@ jobs:
check-changes:
needs: resolve-pr
if: needs.resolve-pr.outputs.PR_NUMBER != ''
runs-on: ubuntu-24.04
outputs:
should_run: "${{ steps.check.outputs.should_run }}"

View file

@ -17,11 +17,6 @@ on:
type: number
required: false
default: 1
timeout_minutes:
description: "Job timeout in minutes"
type: number
required: false
default: 30
enabled_docker_services:
description: "Space-separated list of docker services to enable"
type: string
@ -46,6 +41,20 @@ on:
type: string
required: false
default: onprem
server_edition:
description: "Server edition: enterprise (default), fips, or team"
type: string
required: false
default: enterprise
server_image_repo:
description: "Docker registry: mattermostdevelopment (default) or mattermost"
type: string
required: false
default: mattermostdevelopment
server_image_aliases:
description: "Comma-separated alias tags for description (e.g., 'release-11.4, release-11')"
type: string
required: false
# Reporting options
enable_reporting:
@ -55,6 +64,10 @@ on:
report_type:
type: string
required: false
ref_branch:
description: "Source branch name for webhook messages (e.g., 'master' or 'release-11.4')"
type: string
required: false
pr_number:
type: string
required: false
@ -92,7 +105,7 @@ on:
required: false
env:
SERVER_IMAGE: "mattermostdevelopment/mattermost-enterprise-edition:${{ inputs.server_image_tag }}"
SERVER_IMAGE: "${{ inputs.server_image_repo }}/${{ inputs.server_edition == 'fips' && 'mattermost-enterprise-fips-edition' || inputs.server_edition == 'team' && 'mattermost-team-edition' || 'mattermost-enterprise-edition' }}:${{ inputs.server_image_tag }}"
jobs:
update-initial-status:
@ -106,7 +119,7 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "with image tag: ${{ inputs.server_image_tag }}"
description: "tests running, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: pending
generate-test-cycle:
@ -114,19 +127,21 @@ jobs:
outputs:
status_check_url: "${{ steps.generate-cycle.outputs.status_check_url }}"
workers: "${{ steps.generate-workers.outputs.workers }}"
start_time: "${{ steps.generate-workers.outputs.start_time }}"
steps:
- name: ci/generate-workers
id: generate-workers
run: |
echo "workers=$(jq -nc '[range(${{ inputs.workers }})]')" >> $GITHUB_OUTPUT
echo "start_time=$(date +%s)" >> $GITHUB_OUTPUT
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -154,7 +169,7 @@ jobs:
run-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
continue-on-error: ${{ inputs.workers > 1 }}
needs:
- generate-test-cycle
@ -187,7 +202,7 @@ jobs:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -203,7 +218,7 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: cypress-${{ inputs.test_type }}-results-${{ matrix.worker_index }}
name: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-${{ matrix.worker_index }}
path: |
e2e-tests/cypress/logs/
e2e-tests/cypress/results/
@ -227,16 +242,15 @@ jobs:
total: ${{ steps.calculate.outputs.total }}
pass_rate: ${{ steps.calculate.outputs.pass_rate }}
color: ${{ steps.calculate.outputs.color }}
test_duration: ${{ steps.calculate.outputs.test_duration }}
end_time: ${{ steps.record-end-time.outputs.end_time }}
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: ci/download-results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: cypress-${{ inputs.test_type }}-results-*
pattern: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-*
path: e2e-tests/cypress/
merge-multiple: true
- name: ci/calculate
@ -244,10 +258,13 @@ jobs:
uses: ./.github/actions/calculate-cypress-results
with:
original-results-path: e2e-tests/cypress/results
- name: ci/record-end-time
id: record-end-time
run: echo "end_time=$(date +%s)" >> $GITHUB_OUTPUT
run-failed-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
needs:
- generate-test-cycle
- run-tests
@ -279,7 +296,7 @@ jobs:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -298,7 +315,7 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: cypress-${{ inputs.test_type }}-retest-results
name: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-retest-results
path: |
e2e-tests/cypress/logs/
e2e-tests/cypress/results/
@ -316,17 +333,17 @@ jobs:
passed: "${{ steps.final-results.outputs.passed }}"
failed: "${{ steps.final-results.outputs.failed }}"
commit_status_message: "${{ steps.final-results.outputs.commit_status_message }}"
duration: "${{ steps.duration.outputs.duration }}"
duration_display: "${{ steps.duration.outputs.duration_display }}"
retest_display: "${{ steps.duration.outputs.retest_display }}"
defaults:
run:
working-directory: e2e-tests
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -337,7 +354,7 @@ jobs:
if: needs.run-failed-tests.result == 'skipped'
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: cypress-${{ inputs.test_type }}-results-*
pattern: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-*
path: e2e-tests/cypress/
merge-multiple: true
- name: ci/use-previous-calculation
@ -354,6 +371,7 @@ jobs:
echo "total=${{ needs.calculate-results.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ needs.calculate-results.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ needs.calculate-results.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ needs.calculate-results.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "${{ needs.calculate-results.outputs.failed_tests }}"
@ -365,14 +383,14 @@ jobs:
if: needs.run-failed-tests.result != 'skipped'
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: cypress-${{ inputs.test_type }}-results-*
pattern: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-*
path: e2e-tests/cypress/
merge-multiple: true
- name: ci/download-retest-results
if: needs.run-failed-tests.result != 'skipped'
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: cypress-${{ inputs.test_type }}-retest-results
name: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-retest-results
path: e2e-tests/cypress/retest-results/
- name: ci/calculate-results
if: needs.run-failed-tests.result != 'skipped'
@ -400,6 +418,7 @@ jobs:
echo "total=${{ steps.use-previous.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ steps.use-previous.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ steps.use-previous.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ steps.use-previous.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "$USE_PREVIOUS_FAILED_TESTS"
@ -416,6 +435,7 @@ jobs:
echo "total=${{ steps.recalculate.outputs.total }}" >> $GITHUB_OUTPUT
echo "pass_rate=${{ steps.recalculate.outputs.pass_rate }}" >> $GITHUB_OUTPUT
echo "color=${{ steps.recalculate.outputs.color }}" >> $GITHUB_OUTPUT
echo "test_duration=${{ steps.recalculate.outputs.test_duration }}" >> $GITHUB_OUTPUT
{
echo "failed_tests<<EOF"
echo "$RECALCULATE_FAILED_TESTS"
@ -423,11 +443,61 @@ jobs:
} >> $GITHUB_OUTPUT
fi
- name: ci/compute-duration
id: duration
env:
START_TIME: ${{ needs.generate-test-cycle.outputs.start_time }}
FIRST_PASS_END_TIME: ${{ needs.calculate-results.outputs.end_time }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
RETEST_SPEC_COUNT: ${{ needs.calculate-results.outputs.failed_specs_count }}
TEST_DURATION: ${{ steps.final-results.outputs.test_duration }}
run: |
NOW=$(date +%s)
ELAPSED=$((NOW - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
DURATION="${MINUTES}m ${SECONDS}s"
# Compute first-pass and re-run durations
FIRST_PASS_ELAPSED=$((FIRST_PASS_END_TIME - START_TIME))
FP_MIN=$((FIRST_PASS_ELAPSED / 60))
FP_SEC=$((FIRST_PASS_ELAPSED % 60))
FIRST_PASS="${FP_MIN}m ${FP_SEC}s"
if [ "$RETEST_RESULT" != "skipped" ]; then
RERUN_ELAPSED=$((NOW - FIRST_PASS_END_TIME))
RR_MIN=$((RERUN_ELAPSED / 60))
RR_SEC=$((RERUN_ELAPSED % 60))
RUN_BREAKDOWN=" (first-pass: ${FIRST_PASS}, re-run: ${RR_MIN}m ${RR_SEC}s)"
else
RUN_BREAKDOWN=""
fi
# Duration icons: >20m high alert, >15m warning, otherwise clock
if [ "$MINUTES" -ge 20 ]; then
DURATION_DISPLAY=":rotating_light: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
elif [ "$MINUTES" -ge 15 ]; then
DURATION_DISPLAY=":warning: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
else
DURATION_DISPLAY=":clock3: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
fi
# Retest indicator with spec count
if [ "$RETEST_RESULT" != "skipped" ]; then
RETEST_DISPLAY=":repeat: re-run ${RETEST_SPEC_COUNT} spec(s)"
else
RETEST_DISPLAY=""
fi
echo "duration=${DURATION}" >> $GITHUB_OUTPUT
echo "duration_display=${DURATION_DISPLAY}" >> $GITHUB_OUTPUT
echo "retest_display=${RETEST_DISPLAY}" >> $GITHUB_OUTPUT
- name: ci/upload-combined-results
if: inputs.workers > 1
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: cypress-${{ inputs.test_type }}-results
name: cypress-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: |
e2e-tests/cypress/logs/
e2e-tests/cypress/results/
@ -435,18 +505,37 @@ jobs:
if: inputs.enable_reporting && env.REPORT_WEBHOOK_URL != ''
env:
REPORT_WEBHOOK_URL: ${{ secrets.REPORT_WEBHOOK_URL }}
PASS_RATE: ${{ steps.final-results.outputs.pass_rate }}
PASSED: ${{ steps.final-results.outputs.passed }}
TOTAL: ${{ steps.final-results.outputs.total }}
TOTAL_SPECS: ${{ steps.final-results.outputs.total_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
COLOR: ${{ steps.final-results.outputs.color }}
REPORT_URL: ${{ needs.generate-test-cycle.outputs.status_check_url }}
TEST_TYPE: ${{ inputs.test_type }}
REPORT_TYPE: ${{ inputs.report_type }}
COMMIT_SHA: ${{ inputs.commit_sha }}
REF_BRANCH: ${{ inputs.ref_branch }}
PR_NUMBER: ${{ inputs.pr_number }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_DISPLAY: ${{ steps.duration.outputs.retest_display }}
run: |
# Capitalize test type
TEST_TYPE_CAP=$(echo "$TEST_TYPE" | sed 's/.*/\u&/')
# Build source line based on report type
COMMIT_SHORT="${COMMIT_SHA::7}"
COMMIT_URL="https://github.com/${{ github.repository }}/commit/${COMMIT_SHA}"
if [ "$REPORT_TYPE" = "RELEASE_CUT" ]; then
SOURCE_LINE=":github_round: [${COMMIT_SHORT}](${COMMIT_URL}) on \`${REF_BRANCH}\`"
elif [ "$REPORT_TYPE" = "MASTER" ] || [ "$REPORT_TYPE" = "RELEASE" ]; then
SOURCE_LINE=":git_merge: [${COMMIT_SHORT}](${COMMIT_URL}) on \`${REF_BRANCH}\`"
else
SOURCE_LINE=":open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})"
fi
# Build retest part for message
RETEST_PART=""
if [ -n "$RETEST_DISPLAY" ]; then
RETEST_PART=" | ${RETEST_DISPLAY}"
fi
# Build payload with attachments
PAYLOAD=$(cat <<EOF
{
@ -454,7 +543,7 @@ jobs:
"icon_url": "https://mattermost.com/wp-content/uploads/2022/02/icon_WS.png",
"attachments": [{
"color": "${COLOR}",
"text": "**Results - Cypress ${TEST_TYPE_CAP} Tests**\n\n:open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${PASS_RATE}% (${PASSED}/${TOTAL}) in ${TOTAL_SPECS} spec files | [full report](${REPORT_URL})"
"text": "**Results - Cypress ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE}${RETEST_PART} | [full report](${REPORT_URL})\n${DURATION_DISPLAY}"
}]
}
EOF
@ -475,6 +564,8 @@ jobs:
FAILED_SPECS: ${{ steps.final-results.outputs.failed_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
FAILED_TESTS: ${{ steps.final-results.outputs.failed_tests }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
run: |
{
echo "## E2E Test Results - Cypress ${TEST_TYPE}"
@ -504,6 +595,12 @@ jobs:
echo "| failed_specs_count | ${FAILED_SPECS_COUNT} |"
echo "| commit_status_message | ${COMMIT_STATUS_MESSAGE} |"
echo "| failed_specs | ${FAILED_SPECS:-none} |"
echo "| duration | ${DURATION_DISPLAY} |"
if [ "$RETEST_RESULT" != "skipped" ]; then
echo "| retested | Yes |"
else
echo "| retested | No |"
fi
echo ""
echo "---"
@ -528,7 +625,7 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }} with image tag: ${{ inputs.server_image_tag }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: success
target_url: ${{ needs.generate-test-cycle.outputs.status_check_url }}
@ -547,6 +644,6 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }} with image tag: ${{ inputs.server_image_tag }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: failure
target_url: ${{ needs.generate-test-cycle.outputs.status_check_url }}

View file

@ -24,6 +24,23 @@ on:
type: string
required: false
description: "Server image tag (e.g., master or short SHA)"
server_edition:
type: string
required: false
description: "Server edition: enterprise (default), fips, or team"
server_image_repo:
type: string
required: false
default: mattermostdevelopment
description: "Docker registry: mattermostdevelopment (default) or mattermost"
server_image_aliases:
type: string
required: false
description: "Comma-separated alias tags for context name (e.g., 'release-11.4, release-11')"
ref_branch:
type: string
required: false
description: "Source branch name for webhook messages (e.g., 'master' or 'release-11.4')"
secrets:
MM_LICENSE:
required: false
@ -47,6 +64,8 @@ jobs:
branch: "${{ steps.build-vars.outputs.branch }}"
build_id: "${{ steps.build-vars.outputs.build_id }}"
server_image_tag: "${{ steps.build-vars.outputs.server_image_tag }}"
server_image: "${{ steps.build-vars.outputs.server_image }}"
context_suffix: "${{ steps.build-vars.outputs.context_suffix }}"
steps:
- name: ci/generate-build-variables
id: build-vars
@ -63,63 +82,81 @@ jobs:
else
SERVER_IMAGE_TAG="${COMMIT_SHA::7}"
fi
# Validate server_image_tag format (alphanumeric, dots, hyphens, underscores)
if ! [[ "$SERVER_IMAGE_TAG" =~ ^[a-zA-Z0-9._-]+$ ]]; then
echo "::error::Invalid server_image_tag format: ${SERVER_IMAGE_TAG}"
exit 1
fi
echo "server_image_tag=${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
# Generate branch name
REF_BRANCH="${{ inputs.ref_branch }}"
if [ -n "$PR_NUMBER" ]; then
echo "branch=server-pr-${PR_NUMBER}" >> $GITHUB_OUTPUT
elif [ -n "$REF_BRANCH" ]; then
echo "branch=server-${REF_BRANCH}-${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
else
echo "branch=server-commit-${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
fi
# Generate build ID
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-cypress-onprem-ent" >> $GITHUB_OUTPUT
# Determine server image name
EDITION="${{ inputs.server_edition }}"
REPO="${{ inputs.server_image_repo }}"
REPO="${REPO:-mattermostdevelopment}"
case "$EDITION" in
fips) IMAGE_NAME="mattermost-enterprise-fips-edition" ;;
team) IMAGE_NAME="mattermost-team-edition" ;;
*) IMAGE_NAME="mattermost-enterprise-edition" ;;
esac
SERVER_IMAGE="${REPO}/${IMAGE_NAME}:${SERVER_IMAGE_TAG}"
echo "server_image=${SERVER_IMAGE}" >> $GITHUB_OUTPUT
cypress-smoke:
needs:
- generate-build-variables
uses: ./.github/workflows/e2e-tests-cypress-template.yml
with:
test_type: smoke
test_filter: "--stage=@prod --group=@smoke"
workers: 1
timeout_minutes: 30
enabled_docker_services: "postgres inbucket"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server: ${{ inputs.server }}
context_name: "E2E Tests / cypress-smoke"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AUTOMATION_DASHBOARD_URL: ${{ secrets.AUTOMATION_DASHBOARD_URL }}
AUTOMATION_DASHBOARD_TOKEN: ${{ secrets.AUTOMATION_DASHBOARD_TOKEN }}
PUSH_NOTIFICATION_SERVER: ${{ secrets.PUSH_NOTIFICATION_SERVER }}
CWS_URL: ${{ secrets.CWS_URL }}
CWS_EXTRA_HTTP_HEADERS: ${{ secrets.CWS_EXTRA_HTTP_HEADERS }}
# Validate server_image_aliases format if provided
ALIASES="${{ inputs.server_image_aliases }}"
if [ -n "$ALIASES" ] && ! [[ "$ALIASES" =~ ^[a-zA-Z0-9._,\ -]+$ ]]; then
echo "::error::Invalid server_image_aliases format: ${ALIASES}"
exit 1
fi
# Generate build ID
if [ -n "$EDITION" ] && [ "$EDITION" != "enterprise" ]; then
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-cypress-onprem-${EDITION}" >> $GITHUB_OUTPUT
else
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-cypress-onprem-ent" >> $GITHUB_OUTPUT
fi
# Generate context name suffix based on report type
REPORT_TYPE="${{ inputs.report_type }}"
case "$REPORT_TYPE" in
MASTER) echo "context_suffix=/master" >> $GITHUB_OUTPUT ;;
RELEASE) echo "context_suffix=/release" >> $GITHUB_OUTPUT ;;
RELEASE_CUT) echo "context_suffix=/release-cut" >> $GITHUB_OUTPUT ;;
*) echo "context_suffix=" >> $GITHUB_OUTPUT ;;
esac
cypress-full:
needs:
- cypress-smoke
- generate-build-variables
if: needs.cypress-smoke.outputs.failed == '0'
uses: ./.github/workflows/e2e-tests-cypress-template.yml
with:
test_type: full
test_filter: '--stage="@prod" --excludeGroup="@smoke,@te_only,@cloud_only,@high_availability" --sortFirst="@compliance_export,@elasticsearch,@ldap_group,@ldap" --sortLast="@saml,@keycloak,@plugin,@plugins_uninstall,@mfa,@license_removal"'
workers: 20
timeout_minutes: 60
test_filter: '--stage="@prod" --excludeGroup="@te_only,@cloud_only,@high_availability" --sortFirst="@compliance_export,@elasticsearch,@ldap_group,@ldap" --sortLast="@saml,@keycloak,@plugin,@plugins_uninstall,@mfa,@license_removal"'
workers: 40
enabled_docker_services: "postgres inbucket minio openldap elasticsearch keycloak"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server_edition: ${{ inputs.server_edition }}
server_image_repo: ${{ inputs.server_image_repo }}
server_image_aliases: ${{ inputs.server_image_aliases }}
server: ${{ inputs.server }}
enable_reporting: ${{ inputs.enable_reporting }}
report_type: ${{ inputs.report_type }}
ref_branch: ${{ inputs.ref_branch }}
pr_number: ${{ inputs.pr_number }}
context_name: "E2E Tests / cypress-full"
context_name: "e2e-test/cypress-full/${{ inputs.server_edition || 'enterprise' }}${{ needs.generate-build-variables.outputs.context_suffix }}"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AUTOMATION_DASHBOARD_URL: ${{ secrets.AUTOMATION_DASHBOARD_URL }}

130
.github/workflows/e2e-tests-on-merge.yml vendored Normal file
View file

@ -0,0 +1,130 @@
---
name: E2E Tests (master/release - merge)
on:
workflow_dispatch:
inputs:
branch:
type: string
required: true
description: "Branch name (e.g., 'master' or 'release-11.4')"
commit_sha:
type: string
required: true
description: "Commit SHA to test"
server_image_tag:
type: string
required: true
description: "Docker image tag (e.g., 'abc1234_def5678' or 'master')"
jobs:
generate-build-variables:
runs-on: ubuntu-24.04
outputs:
report_type: "${{ steps.vars.outputs.report_type }}"
ref_branch: "${{ steps.vars.outputs.ref_branch }}"
steps:
- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.branch }}
fetch-depth: 50
- name: ci/generate-variables
id: vars
env:
BRANCH: ${{ inputs.branch }}
COMMIT_SHA: ${{ inputs.commit_sha }}
run: |
# Strip refs/heads/ prefix if present
BRANCH="${BRANCH#refs/heads/}"
# Validate branch is master or release-X.Y
if [[ "$BRANCH" == "master" ]]; then
echo "report_type=MASTER" >> $GITHUB_OUTPUT
elif [[ "$BRANCH" =~ ^release-[0-9]+\.[0-9]+$ ]]; then
echo "report_type=RELEASE" >> $GITHUB_OUTPUT
else
echo "::error::Branch ${BRANCH} must be 'master' or 'release-X.Y' format."
exit 1
fi
echo "ref_branch=${BRANCH}" >> $GITHUB_OUTPUT
# Validate commit exists on the branch
if ! git merge-base --is-ancestor "$COMMIT_SHA" HEAD; then
echo "::error::Commit ${COMMIT_SHA} is not on branch ${BRANCH}."
exit 1
fi
# Enterprise Edition
e2e-cypress:
needs: generate-build-variables
uses: ./.github/workflows/e2e-tests-cypress.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}
ref_branch: ${{ needs.generate-build-variables.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AUTOMATION_DASHBOARD_URL: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_URL }}"
AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_TOKEN }}"
PUSH_NOTIFICATION_SERVER: "${{ secrets.MM_E2E_PUSH_NOTIFICATION_SERVER }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
CWS_URL: "${{ secrets.MM_E2E_CWS_URL }}"
CWS_EXTRA_HTTP_HEADERS: "${{ secrets.MM_E2E_CWS_EXTRA_HTTP_HEADERS }}"
e2e-playwright:
needs: generate-build-variables
uses: ./.github/workflows/e2e-tests-playwright.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}
ref_branch: ${{ needs.generate-build-variables.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AWS_ACCESS_KEY_ID: "${{ secrets.CYPRESS_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.CYPRESS_AWS_SECRET_ACCESS_KEY }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
# Enterprise FIPS Edition
e2e-cypress-fips:
needs: generate-build-variables
uses: ./.github/workflows/e2e-tests-cypress.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}
ref_branch: ${{ needs.generate-build-variables.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AUTOMATION_DASHBOARD_URL: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_URL }}"
AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_TOKEN }}"
PUSH_NOTIFICATION_SERVER: "${{ secrets.MM_E2E_PUSH_NOTIFICATION_SERVER }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
CWS_URL: "${{ secrets.MM_E2E_CWS_URL }}"
CWS_EXTRA_HTTP_HEADERS: "${{ secrets.MM_E2E_CWS_EXTRA_HTTP_HEADERS }}"
e2e-playwright-fips:
needs: generate-build-variables
uses: ./.github/workflows/e2e-tests-playwright.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
server: onprem
enable_reporting: true
report_type: ${{ needs.generate-build-variables.outputs.report_type }}
ref_branch: ${{ needs.generate-build-variables.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AWS_ACCESS_KEY_ID: "${{ secrets.CYPRESS_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.CYPRESS_AWS_SECRET_ACCESS_KEY }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"

View file

@ -0,0 +1,133 @@
---
name: E2E Tests (release cut)
on:
workflow_dispatch:
inputs:
branch:
type: string
required: true
description: "Release branch (e.g., 'release-11.4')"
commit_sha:
type: string
required: true
description: "Commit SHA to test"
server_image_tag:
type: string
required: true
description: "Docker image tag (e.g., '11.4.0', '11.4.0-rc3', or 'release-11.4')"
server_image_aliases:
type: string
required: false
description: "Comma-separated alias tags (e.g., 'release-11.4, release-11')"
jobs:
validate:
runs-on: ubuntu-24.04
outputs:
ref_branch: "${{ steps.check.outputs.ref_branch }}"
steps:
- name: ci/checkout-repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.branch }}
fetch-depth: 50
- name: ci/validate-inputs
id: check
env:
BRANCH: ${{ inputs.branch }}
COMMIT_SHA: ${{ inputs.commit_sha }}
run: |
# Strip refs/heads/ prefix if present
BRANCH="${BRANCH#refs/heads/}"
if ! [[ "$BRANCH" =~ ^release-[0-9]+\.[0-9]+$ ]]; then
echo "::error::Branch ${BRANCH} must be 'release-X.Y' format."
exit 1
elif ! git merge-base --is-ancestor "$COMMIT_SHA" HEAD; then
echo "::error::Commit ${COMMIT_SHA} is not on branch ${BRANCH}."
exit 1
fi
echo "ref_branch=${BRANCH}" >> $GITHUB_OUTPUT
# Enterprise Edition
e2e-cypress:
needs: validate
uses: ./.github/workflows/e2e-tests-cypress.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
server: onprem
enable_reporting: true
report_type: RELEASE_CUT
ref_branch: ${{ needs.validate.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AUTOMATION_DASHBOARD_URL: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_URL }}"
AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_TOKEN }}"
PUSH_NOTIFICATION_SERVER: "${{ secrets.MM_E2E_PUSH_NOTIFICATION_SERVER }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
CWS_URL: "${{ secrets.MM_E2E_CWS_URL }}"
CWS_EXTRA_HTTP_HEADERS: "${{ secrets.MM_E2E_CWS_EXTRA_HTTP_HEADERS }}"
e2e-playwright:
needs: validate
uses: ./.github/workflows/e2e-tests-playwright.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
server: onprem
enable_reporting: true
report_type: RELEASE_CUT
ref_branch: ${{ needs.validate.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AWS_ACCESS_KEY_ID: "${{ secrets.CYPRESS_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.CYPRESS_AWS_SECRET_ACCESS_KEY }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
# Enterprise FIPS Edition
e2e-cypress-fips:
needs: validate
uses: ./.github/workflows/e2e-tests-cypress.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
server: onprem
enable_reporting: true
report_type: RELEASE_CUT
ref_branch: ${{ needs.validate.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AUTOMATION_DASHBOARD_URL: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_URL }}"
AUTOMATION_DASHBOARD_TOKEN: "${{ secrets.MM_E2E_AUTOMATION_DASHBOARD_TOKEN }}"
PUSH_NOTIFICATION_SERVER: "${{ secrets.MM_E2E_PUSH_NOTIFICATION_SERVER }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"
CWS_URL: "${{ secrets.MM_E2E_CWS_URL }}"
CWS_EXTRA_HTTP_HEADERS: "${{ secrets.MM_E2E_CWS_EXTRA_HTTP_HEADERS }}"
e2e-playwright-fips:
needs: validate
uses: ./.github/workflows/e2e-tests-playwright.yml
with:
commit_sha: ${{ inputs.commit_sha }}
server_image_tag: ${{ inputs.server_image_tag }}
server_edition: fips
server_image_repo: mattermost
server_image_aliases: ${{ inputs.server_image_aliases }}
server: onprem
enable_reporting: true
report_type: RELEASE_CUT
ref_branch: ${{ needs.validate.outputs.ref_branch }}
secrets:
MM_LICENSE: "${{ secrets.MM_E2E_TEST_LICENSE_ONPREM_ENT }}"
AWS_ACCESS_KEY_ID: "${{ secrets.CYPRESS_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.CYPRESS_AWS_SECRET_ACCESS_KEY }}"
REPORT_WEBHOOK_URL: "${{ secrets.MM_E2E_REPORT_WEBHOOK_URL }}"

View file

@ -37,7 +37,7 @@ jobs:
COMMIT_SHA: ${{ steps.pr-info.outputs.head_sha }}
run: |
# Only full tests can be overridden (smoke tests must pass)
FULL_TEST_CONTEXTS=("E2E Tests / playwright-full" "E2E Tests / cypress-full")
FULL_TEST_CONTEXTS=("e2e-test/playwright-full/enterprise" "e2e-test/cypress-full/enterprise")
for CONTEXT_NAME in "${FULL_TEST_CONTEXTS[@]}"; do
echo "Checking: $CONTEXT_NAME"

View file

@ -12,11 +12,11 @@ on:
description: "Test filter arguments (e.g., --grep @smoke)"
type: string
required: true
timeout_minutes:
description: "Job timeout in minutes"
workers:
description: "Number of parallel shards"
type: number
required: false
default: 60
default: 2
enabled_docker_services:
description: "Space-separated list of docker services to enable"
type: string
@ -41,6 +41,20 @@ on:
type: string
required: false
default: onprem
server_edition:
description: "Server edition: enterprise (default), fips, or team"
type: string
required: false
default: enterprise
server_image_repo:
description: "Docker registry: mattermostdevelopment (default) or mattermost"
type: string
required: false
default: mattermostdevelopment
server_image_aliases:
description: "Comma-separated alias tags for description (e.g., 'release-11.4, release-11')"
type: string
required: false
# Reporting options
enable_reporting:
@ -50,6 +64,10 @@ on:
report_type:
type: string
required: false
ref_branch:
description: "Source branch name for webhook messages (e.g., 'master' or 'release-11.4')"
type: string
required: false
pr_number:
type: string
required: false
@ -82,7 +100,7 @@ on:
required: true
env:
SERVER_IMAGE: "mattermostdevelopment/mattermost-enterprise-edition:${{ inputs.server_image_tag }}"
SERVER_IMAGE: "${{ inputs.server_image_repo }}/${{ inputs.server_edition == 'fips' && 'mattermost-enterprise-fips-edition' || inputs.server_edition == 'team' && 'mattermost-team-edition' || 'mattermost-enterprise-edition' }}:${{ inputs.server_image_tag }}"
jobs:
update-initial-status:
@ -96,12 +114,32 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "with image tag: ${{ inputs.server_image_tag }}"
description: "tests running, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: pending
generate-test-variables:
runs-on: ubuntu-24.04
outputs:
workers: "${{ steps.generate-workers.outputs.workers }}"
start_time: "${{ steps.generate-workers.outputs.start_time }}"
steps:
- name: ci/generate-workers
id: generate-workers
run: |
echo "workers=$(jq -nc '[range(1; ${{ inputs.workers }} + 1)]')" >> $GITHUB_OUTPUT
echo "start_time=$(date +%s)" >> $GITHUB_OUTPUT
run-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
continue-on-error: true
needs:
- generate-test-variables
if: needs.generate-test-variables.result == 'success'
strategy:
fail-fast: false
matrix:
worker_index: ${{ fromJSON(needs.generate-test-variables.outputs.workers) }}
defaults:
run:
working-directory: e2e-tests
@ -111,16 +149,18 @@ jobs:
ENABLED_DOCKER_SERVICES: "${{ inputs.enabled_docker_services }}"
TEST: playwright
TEST_FILTER: "${{ inputs.test_filter }}"
PW_SHARD: "${{ format('--shard={0}/{1}', matrix.worker_index, inputs.workers) }}"
BRANCH: "${{ inputs.branch }}-${{ inputs.test_type }}"
BUILD_ID: "${{ inputs.build_id }}"
CI_BASE_URL: "${{ inputs.test_type }}-test-${{ matrix.worker_index }}"
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -139,7 +179,7 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: playwright-${{ inputs.test_type }}-results
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-${{ matrix.worker_index }}
path: |
e2e-tests/playwright/logs/
e2e-tests/playwright/results/
@ -148,8 +188,9 @@ jobs:
calculate-results:
runs-on: ubuntu-24.04
needs:
- generate-test-variables
- run-tests
if: always()
if: always() && needs.generate-test-variables.result == 'success'
outputs:
passed: ${{ steps.calculate.outputs.passed }}
failed: ${{ steps.calculate.outputs.failed }}
@ -164,26 +205,49 @@ jobs:
pass_rate: ${{ steps.calculate.outputs.pass_rate }}
passing: ${{ steps.calculate.outputs.passing }}
color: ${{ steps.calculate.outputs.color }}
test_duration: ${{ steps.calculate.outputs.test_duration }}
end_time: ${{ steps.record-end-time.outputs.end_time }}
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: ci/setup-node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/download-results
node-version-file: ".nvmrc"
cache: npm
cache-dependency-path: "e2e-tests/playwright/package-lock.json"
- name: ci/download-shard-results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: playwright-${{ inputs.test_type }}-results
path: e2e-tests/playwright/
pattern: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results-*
path: e2e-tests/playwright/shard-results/
merge-multiple: true
- name: ci/merge-shard-results
working-directory: e2e-tests/playwright
run: |
mkdir -p results/reporter
# Merge blob reports using Playwright merge-reports (per docs)
npm install --no-save @playwright/test
npx playwright merge-reports --config merge.config.mjs ./shard-results/results/blob-report/
- name: ci/calculate
id: calculate
uses: ./.github/actions/calculate-playwright-results
with:
original-results-path: e2e-tests/playwright/results/reporter/results.json
- name: ci/upload-merged-results
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: e2e-tests/playwright/results/
retention-days: 5
- name: ci/record-end-time
id: record-end-time
run: echo "end_time=$(date +%s)" >> $GITHUB_OUTPUT
run-failed-tests:
runs-on: ubuntu-24.04
timeout-minutes: ${{ fromJSON(inputs.timeout_minutes) }}
timeout-minutes: 30
needs:
- run-tests
- calculate-results
@ -204,12 +268,12 @@ jobs:
BUILD_ID: "${{ inputs.build_id }}-retest"
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
@ -231,7 +295,7 @@ jobs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: playwright-${{ inputs.test_type }}-retest-results
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-retest-results
path: |
e2e-tests/playwright/logs/
e2e-tests/playwright/results/
@ -240,6 +304,7 @@ jobs:
report:
runs-on: ubuntu-24.04
needs:
- generate-test-variables
- run-tests
- calculate-results
- run-failed-tests
@ -249,35 +314,35 @@ jobs:
failed: "${{ steps.final-results.outputs.failed }}"
commit_status_message: "${{ steps.final-results.outputs.commit_status_message }}"
report_url: "${{ steps.upload-to-s3.outputs.report_url }}"
duration: "${{ steps.duration.outputs.duration }}"
duration_display: "${{ steps.duration.outputs.duration_display }}"
retest_display: "${{ steps.duration.outputs.retest_display }}"
defaults:
run:
working-directory: e2e-tests
steps:
- name: ci/checkout-repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.commit_sha }}
fetch-depth: 0
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: ci/setup-node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: ".nvmrc"
cache: npm
cache-dependency-path: "e2e-tests/playwright/package-lock.json"
# Download original results (always needed)
# Download merged results (uploaded by calculate-results)
- name: ci/download-results
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: playwright-${{ inputs.test_type }}-results
path: e2e-tests/playwright/
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-results
path: e2e-tests/playwright/results/
# Download retest results (only if retest ran)
- name: ci/download-retest-results
if: needs.run-failed-tests.result != 'skipped'
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: playwright-${{ inputs.test_type }}-retest-results
name: playwright-${{ inputs.test_type }}-${{ inputs.server_edition }}-retest-results
path: e2e-tests/playwright/retest-results/
# Calculate results (with optional merge of retest results)
@ -305,7 +370,6 @@ jobs:
TEST_TYPE: "${{ inputs.test_type }}"
run: |
LOCAL_RESULTS_PATH="playwright/results/"
LOCAL_LOGS_PATH="playwright/logs/"
# Use PR number if available, otherwise use commit SHA prefix
if [ -n "$PR_NUMBER" ]; then
@ -321,22 +385,91 @@ jobs:
REPORT_URL="https://${AWS_S3_BUCKET}.s3.amazonaws.com/${S3_PATH}/results/reporter/index.html"
echo "report_url=$REPORT_URL" >> "$GITHUB_OUTPUT"
- name: ci/compute-duration
id: duration
env:
START_TIME: ${{ needs.generate-test-variables.outputs.start_time }}
FIRST_PASS_END_TIME: ${{ needs.calculate-results.outputs.end_time }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
RETEST_SPEC_COUNT: ${{ needs.calculate-results.outputs.failed_specs_count }}
TEST_DURATION: ${{ steps.final-results.outputs.test_duration }}
run: |
NOW=$(date +%s)
ELAPSED=$((NOW - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
DURATION="${MINUTES}m ${SECONDS}s"
# Compute first-pass and re-run durations
FIRST_PASS_ELAPSED=$((FIRST_PASS_END_TIME - START_TIME))
FP_MIN=$((FIRST_PASS_ELAPSED / 60))
FP_SEC=$((FIRST_PASS_ELAPSED % 60))
FIRST_PASS="${FP_MIN}m ${FP_SEC}s"
if [ "$RETEST_RESULT" != "skipped" ]; then
RERUN_ELAPSED=$((NOW - FIRST_PASS_END_TIME))
RR_MIN=$((RERUN_ELAPSED / 60))
RR_SEC=$((RERUN_ELAPSED % 60))
RUN_BREAKDOWN=" (first-pass: ${FIRST_PASS}, re-run: ${RR_MIN}m ${RR_SEC}s)"
else
RUN_BREAKDOWN=""
fi
# Duration icons: >20m high alert, >15m warning, otherwise clock
if [ "$MINUTES" -ge 20 ]; then
DURATION_DISPLAY=":rotating_light: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
elif [ "$MINUTES" -ge 15 ]; then
DURATION_DISPLAY=":warning: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
else
DURATION_DISPLAY=":clock3: ${DURATION}${RUN_BREAKDOWN} | test: ${TEST_DURATION}"
fi
# Retest indicator with spec count
if [ "$RETEST_RESULT" != "skipped" ]; then
RETEST_DISPLAY=":repeat: re-run ${RETEST_SPEC_COUNT} spec(s)"
else
RETEST_DISPLAY=""
fi
echo "duration=${DURATION}" >> $GITHUB_OUTPUT
echo "duration_display=${DURATION_DISPLAY}" >> $GITHUB_OUTPUT
echo "retest_display=${RETEST_DISPLAY}" >> $GITHUB_OUTPUT
- name: ci/publish-report
if: inputs.enable_reporting && env.REPORT_WEBHOOK_URL != ''
env:
REPORT_WEBHOOK_URL: ${{ secrets.REPORT_WEBHOOK_URL }}
PASS_RATE: ${{ steps.final-results.outputs.pass_rate }}
PASSING: ${{ steps.final-results.outputs.passing }}
TOTAL: ${{ steps.final-results.outputs.total }}
TOTAL_SPECS: ${{ steps.final-results.outputs.total_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
COLOR: ${{ steps.final-results.outputs.color }}
REPORT_URL: ${{ steps.upload-to-s3.outputs.report_url }}
TEST_TYPE: ${{ inputs.test_type }}
REPORT_TYPE: ${{ inputs.report_type }}
COMMIT_SHA: ${{ inputs.commit_sha }}
REF_BRANCH: ${{ inputs.ref_branch }}
PR_NUMBER: ${{ inputs.pr_number }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_DISPLAY: ${{ steps.duration.outputs.retest_display }}
run: |
# Capitalize test type
TEST_TYPE_CAP=$(echo "$TEST_TYPE" | sed 's/.*/\u&/')
# Build source line based on report type
COMMIT_SHORT="${COMMIT_SHA::7}"
COMMIT_URL="https://github.com/${{ github.repository }}/commit/${COMMIT_SHA}"
if [ "$REPORT_TYPE" = "RELEASE_CUT" ]; then
SOURCE_LINE=":github_round: [${COMMIT_SHORT}](${COMMIT_URL}) on \`${REF_BRANCH}\`"
elif [ "$REPORT_TYPE" = "MASTER" ] || [ "$REPORT_TYPE" = "RELEASE" ]; then
SOURCE_LINE=":git_merge: [${COMMIT_SHORT}](${COMMIT_URL}) on \`${REF_BRANCH}\`"
else
SOURCE_LINE=":open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})"
fi
# Build retest part for message
RETEST_PART=""
if [ -n "$RETEST_DISPLAY" ]; then
RETEST_PART=" | ${RETEST_DISPLAY}"
fi
# Build payload with attachments
PAYLOAD=$(cat <<EOF
{
@ -344,7 +477,7 @@ jobs:
"icon_url": "https://mattermost.com/wp-content/uploads/2022/02/icon_WS.png",
"attachments": [{
"color": "${COLOR}",
"text": "**Results - Playwright ${TEST_TYPE_CAP} Tests**\n\n:open-pull-request: [mattermost-pr-${PR_NUMBER}](https://github.com/${{ github.repository }}/pull/${PR_NUMBER})\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${PASS_RATE}% (${PASSING}/${TOTAL}) in ${TOTAL_SPECS} spec files | [full report](${REPORT_URL})"
"text": "**Results - Playwright ${TEST_TYPE_CAP} Tests**\n\n${SOURCE_LINE}\n:docker: \`${{ env.SERVER_IMAGE }}\`\n${COMMIT_STATUS_MESSAGE}${RETEST_PART} | [full report](${REPORT_URL})\n${DURATION_DISPLAY}"
}]
}
EOF
@ -366,6 +499,8 @@ jobs:
FAILED_SPECS: ${{ steps.final-results.outputs.failed_specs }}
COMMIT_STATUS_MESSAGE: ${{ steps.final-results.outputs.commit_status_message }}
FAILED_TESTS: ${{ steps.final-results.outputs.failed_tests }}
DURATION_DISPLAY: ${{ steps.duration.outputs.duration_display }}
RETEST_RESULT: ${{ needs.run-failed-tests.result }}
run: |
{
echo "## E2E Test Results - Playwright ${TEST_TYPE}"
@ -396,6 +531,12 @@ jobs:
echo "| failed_specs_count | ${FAILED_SPECS_COUNT} |"
echo "| commit_status_message | ${COMMIT_STATUS_MESSAGE} |"
echo "| failed_specs | ${FAILED_SPECS:-none} |"
echo "| duration | ${DURATION_DISPLAY} |"
if [ "$RETEST_RESULT" != "skipped" ]; then
echo "| retested | Yes |"
else
echo "| retested | No |"
fi
echo ""
echo "---"
@ -419,7 +560,7 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }} with image tag: ${{ inputs.server_image_tag }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: success
target_url: ${{ needs.report.outputs.report_url }}
@ -437,6 +578,6 @@ jobs:
repository_full_name: ${{ github.repository }}
commit_sha: ${{ inputs.commit_sha }}
context: ${{ inputs.context_name }}
description: "${{ needs.report.outputs.commit_status_message }} with image tag: ${{ inputs.server_image_tag }}"
description: "${{ needs.report.outputs.commit_status_message }}, ${{ needs.report.outputs.duration }}, image_tag:${{ inputs.server_image_tag }}${{ inputs.server_image_aliases && format(' ({0})', inputs.server_image_aliases) || '' }}"
status: failure
target_url: ${{ needs.report.outputs.report_url }}

View file

@ -24,6 +24,23 @@ on:
type: string
required: false
description: "Server image tag (e.g., master or short SHA)"
server_edition:
type: string
required: false
description: "Server edition: enterprise (default), fips, or team"
server_image_repo:
type: string
required: false
default: mattermostdevelopment
description: "Docker registry: mattermostdevelopment (default) or mattermost"
server_image_aliases:
type: string
required: false
description: "Comma-separated alias tags for context name (e.g., 'release-11.4, release-11')"
ref_branch:
type: string
required: false
description: "Source branch name for webhook messages (e.g., 'master' or 'release-11.4')"
secrets:
MM_LICENSE:
required: false
@ -41,6 +58,8 @@ jobs:
branch: "${{ steps.build-vars.outputs.branch }}"
build_id: "${{ steps.build-vars.outputs.build_id }}"
server_image_tag: "${{ steps.build-vars.outputs.server_image_tag }}"
server_image: "${{ steps.build-vars.outputs.server_image }}"
context_suffix: "${{ steps.build-vars.outputs.context_suffix }}"
steps:
- name: ci/generate-build-variables
id: build-vars
@ -57,62 +76,81 @@ jobs:
else
SERVER_IMAGE_TAG="${COMMIT_SHA::7}"
fi
# Validate server_image_tag format (alphanumeric, dots, hyphens, underscores)
if ! [[ "$SERVER_IMAGE_TAG" =~ ^[a-zA-Z0-9._-]+$ ]]; then
echo "::error::Invalid server_image_tag format: ${SERVER_IMAGE_TAG}"
exit 1
fi
echo "server_image_tag=${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
# Generate branch name
REF_BRANCH="${{ inputs.ref_branch }}"
if [ -n "$PR_NUMBER" ]; then
echo "branch=server-pr-${PR_NUMBER}" >> $GITHUB_OUTPUT
elif [ -n "$REF_BRANCH" ]; then
echo "branch=server-${REF_BRANCH}-${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
else
echo "branch=server-commit-${SERVER_IMAGE_TAG}" >> $GITHUB_OUTPUT
fi
# Determine server image name
EDITION="${{ inputs.server_edition }}"
REPO="${{ inputs.server_image_repo }}"
REPO="${REPO:-mattermostdevelopment}"
case "$EDITION" in
fips) IMAGE_NAME="mattermost-enterprise-fips-edition" ;;
team) IMAGE_NAME="mattermost-team-edition" ;;
*) IMAGE_NAME="mattermost-enterprise-edition" ;;
esac
SERVER_IMAGE="${REPO}/${IMAGE_NAME}:${SERVER_IMAGE_TAG}"
echo "server_image=${SERVER_IMAGE}" >> $GITHUB_OUTPUT
# Validate server_image_aliases format if provided
ALIASES="${{ inputs.server_image_aliases }}"
if [ -n "$ALIASES" ] && ! [[ "$ALIASES" =~ ^[a-zA-Z0-9._,\ -]+$ ]]; then
echo "::error::Invalid server_image_aliases format: ${ALIASES}"
exit 1
fi
# Generate build ID
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-playwright-onprem-ent" >> $GITHUB_OUTPUT
if [ -n "$EDITION" ] && [ "$EDITION" != "enterprise" ]; then
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-playwright-onprem-${EDITION}" >> $GITHUB_OUTPUT
else
echo "build_id=${RUN_ID}_${RUN_ATTEMPT}-${SERVER_IMAGE_TAG}-playwright-onprem-ent" >> $GITHUB_OUTPUT
fi
playwright-smoke:
needs:
- generate-build-variables
uses: ./.github/workflows/e2e-tests-playwright-template.yml
with:
test_type: smoke
test_filter: "--grep @smoke"
timeout_minutes: 30
enabled_docker_services: "postgres inbucket"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server: ${{ inputs.server }}
context_name: "E2E Tests / playwright-smoke"
pr_number: ${{ inputs.pr_number }}
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# Generate context name suffix based on report type
REPORT_TYPE="${{ inputs.report_type }}"
case "$REPORT_TYPE" in
MASTER) echo "context_suffix=/master" >> $GITHUB_OUTPUT ;;
RELEASE) echo "context_suffix=/release" >> $GITHUB_OUTPUT ;;
RELEASE_CUT) echo "context_suffix=/release-cut" >> $GITHUB_OUTPUT ;;
*) echo "context_suffix=" >> $GITHUB_OUTPUT ;;
esac
# ════════════════════════════════════════════════════════════════════════════
# FULL TESTS (only if smoke passes and pr_number is provided)
# ════════════════════════════════════════════════════════════════════════════
playwright-full:
needs:
- playwright-smoke
- generate-build-variables
if: needs.playwright-smoke.outputs.failed == '0' && inputs.pr_number != ''
uses: ./.github/workflows/e2e-tests-playwright-template.yml
with:
test_type: full
test_filter: '--grep-invert "@smoke|@visual"'
timeout_minutes: 120
test_filter: '--grep-invert "@visual"'
workers: 4
enabled_docker_services: "postgres inbucket minio openldap elasticsearch keycloak"
commit_sha: ${{ inputs.commit_sha }}
branch: ${{ needs.generate-build-variables.outputs.branch }}
build_id: ${{ needs.generate-build-variables.outputs.build_id }}
server_image_tag: ${{ needs.generate-build-variables.outputs.server_image_tag }}
server_edition: ${{ inputs.server_edition }}
server_image_repo: ${{ inputs.server_image_repo }}
server_image_aliases: ${{ inputs.server_image_aliases }}
server: ${{ inputs.server }}
enable_reporting: ${{ inputs.enable_reporting }}
report_type: ${{ inputs.report_type }}
ref_branch: ${{ inputs.ref_branch }}
pr_number: ${{ inputs.pr_number }}
context_name: "E2E Tests / playwright-full"
context_name: "e2e-test/playwright-full/${{ inputs.server_edition || 'enterprise' }}${{ needs.generate-build-variables.outputs.context_suffix }}"
secrets:
MM_LICENSE: ${{ secrets.MM_LICENSE }}
REPORT_WEBHOOK_URL: ${{ secrets.REPORT_WEBHOOK_URL }}

View file

@ -34,7 +34,7 @@ jobs:
COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
run: |
# Only full tests can be overridden (smoke tests must pass)
FULL_TEST_CONTEXTS=("E2E Tests / playwright-full" "E2E Tests / cypress-full")
FULL_TEST_CONTEXTS=("e2e-test/playwright-full/enterprise" "e2e-test/cypress-full/enterprise")
OVERRIDDEN=""
WEBHOOK_DATA="[]"

View file

@ -79,13 +79,15 @@
$ref: "#/components/responses/Forbidden"
/api/v4/users/login/sso/code-exchange:
post:
deprecated: true
tags:
- users
summary: Exchange SSO login code for session tokens
description: >
Exchange a short-lived login_code for session tokens using SAML code exchange (mobile SSO flow).
This endpoint is part of the mobile SSO code-exchange flow to prevent tokens
from appearing in deep links.
**Deprecated:** This endpoint is deprecated and will be removed in a future release.
Mobile clients should use the direct SSO callback flow instead.
##### Permissions
@ -130,6 +132,8 @@
$ref: "#/components/responses/BadRequest"
"403":
$ref: "#/components/responses/Forbidden"
"410":
description: Endpoint is deprecated and disabled
/oauth/intune:
post:
tags:

View file

@ -262,17 +262,19 @@ $(if mme2e_is_token_in_list "webhook-interactions" "$ENABLED_DOCKER_SERVICES"; t
echo '
webhook-interactions:
image: node:${NODE_VERSION_REQUIRED}
command: sh -c "npm install --global --legacy-peer-deps && exec node webhook_serve.js"
command: sh -c "npm init -y > /dev/null && npm install express@5.1.0 axios@1.11.0 client-oauth2@github:larkox/js-client-oauth2#e24e2eb5dfcbbbb3a59d095e831dbe0012b0ac49 && exec node webhook_serve.js"
healthcheck:
test: ["CMD", "curl", "-s", "-o/dev/null", "127.0.0.1:3000"]
interval: 10s
timeout: 15s
retries: 12
working_dir: /cypress
working_dir: /webhook
network_mode: host
restart: on-failure
volumes:
- "../../e2e-tests/cypress/:/cypress:ro"'
- "../../e2e-tests/cypress/webhook_serve.js:/webhook/webhook_serve.js:ro"
- "../../e2e-tests/cypress/utils/:/webhook/utils:ro"
- "../../e2e-tests/cypress/tests/plugins/post_message_as.js:/webhook/tests/plugins/post_message_as.js:ro"'
fi)
$(if mme2e_is_token_in_list "playwright" "$ENABLED_DOCKER_SERVICES"; then

View file

@ -37,7 +37,7 @@ EOF
# Run Playwright test
# NB: do not exit the script if some testcases fail
${MME2E_DC_SERVER} exec -i -u "$MME2E_UID" -- playwright bash -c "cd e2e-tests/playwright && npm run test:ci -- ${TEST_FILTER}" | tee ../playwright/logs/playwright.log || true
${MME2E_DC_SERVER} exec -i -u "$MME2E_UID" -- playwright bash -c "cd e2e-tests/playwright && npm run test:ci -- ${TEST_FILTER} ${PW_SHARD:-}" | tee ../playwright/logs/playwright.log || true
# Collect run results
# Documentation on the results.json file: https://playwright.dev/docs/api/class-testcase#test-case-expected-status

View file

@ -45,3 +45,14 @@ for MIGRATION in migration_advanced_permissions_phase_2; do
mme2e_log "${MIGRATION}: completed."
done
mme2e_log "Mattermost container is running and healthy"
# Wait for webhook-interactions container if running cypress tests
if [ "$TEST" = "cypress" ]; then
mme2e_log "Checking webhook-interactions container health"
${MME2E_DC_SERVER} logs --no-log-prefix -- webhook-interactions 2>&1 | tail -5
if ! mme2e_wait_service_healthy webhook-interactions 2 10; then
mme2e_log "Webhook interactions container not healthy, retry attempts exhausted. Giving up." >&2
exit 1
fi
mme2e_log "Webhook interactions container is running and healthy"
fi

View file

@ -162,6 +162,9 @@ describe('Channel Type Conversion (Public to Private Only)', () => {
// Verify settings were saved
verifySettingsSaved();
// Verify the modal completely closed to avoid flakiness
cy.get('#confirmModal').should('not.exist');
};
// Function kept for potential future use but not used in current tests

View file

@ -158,6 +158,9 @@ describe('Group Message Conversion To Private Channel', () => {
// Open the GM
cy.visit(`/${testTeam1.name}/messages/${gm.name}`);
// Wait until the channel is loaded
cy.get('#channelHeaderDropdownButton').should('be.visible');
// convert via API call
const timestamp = Date.now();
cy.apiConvertGMToPrivateChannel(gm.id, testTeam2.id, `Channel ${timestamp}`, `c-${timestamp}`).then(() => {

View file

@ -1,94 +0,0 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
// ***************************************************************
// - [#] indicates a test step (e.g. # Go to a page)
// - [*] indicates an assertion (e.g. * Check the title)
// - Use element ID when selecting an element. Create one if none.
// ***************************************************************
// Stage: @prod
// Group: @channels @team_settings
import {getRandomId} from '../../../utils';
import * as TIMEOUTS from '../../../fixtures/timeouts';
describe('Team Settings', () => {
let newUser;
before(() => {
cy.apiInitSetup().then(({team}) => {
cy.apiCreateUser().then(({user}) => {
newUser = user;
});
cy.visit(`/${team.name}`);
});
});
it('MM-T388 - Invite new user to closed team with "Allow only users with a specific email domain to join this team" set to "sample.mattermost.com" AND include a non-sample.mattermost.com email address in the invites', () => {
const emailDomain = 'sample.mattermost.com';
const invalidEmail = `user.${getRandomId()}@invalid.com`;
const userDetailsString = `@${newUser.username} - ${newUser.first_name} ${newUser.last_name} (${newUser.nickname})`;
const inviteSuccessMessage = 'This member has been added to the team.';
const inviteFailedMessage = `The following email addresses do not belong to an accepted domain: ${invalidEmail}. Please contact your System Administrator for details.`;
// # Open team menu and click 'Team Settings'
cy.uiOpenTeamMenu('Team settings');
// * Check that the 'Team Settings' modal was opened
cy.get('#teamSettingsModal').should('exist').within(() => {
// # Go to Access section
cy.get('#accessButton').click();
cy.get('.access-allowed-domains-section').should('exist').within(() => {
// # Click on the 'Allow only users with a specific email domain to join this team' checkbox
cy.get('.mm-modal-generic-section-item__input-checkbox').should('not.be.checked').click();
});
// # Set 'sample.mattermost.com' as the only allowed email domain and save
cy.get('#allowedDomains').click().type(emailDomain).type(' ');
cy.findByText('Save').should('be.visible').click();
});
// # Close the modal
cy.findByLabelText('Close').click();
// * Wait for modal to be closed
cy.get('#teamSettingsModal').should('not.exist');
// # Open team menu and click 'Invite People'
cy.uiOpenTeamMenu('Invite people');
// # Invite user with valid email domain that is not in the team
inviteNewMemberToTeam(newUser.email);
// * Assert that the user has successfully been invited to the team
cy.get('.invitation-modal-confirm--sent').should('be.visible').within(() => {
cy.get('.username-or-icon').find('span').eq(0).should('have.text', userDetailsString);
cy.get('.InviteResultRow').find('.reason').should('have.text', inviteSuccessMessage);
});
// # Click on the 'Invite More People button'
cy.findByTestId('invite-more').click();
// # Invite a user with an invalid email domain (not sample.mattermost.com)
inviteNewMemberToTeam(invalidEmail);
// * Assert that the invite failed and the correct error message is shown
cy.get('.invitation-modal-confirm--not-sent').should('be.visible').within(() => {
cy.get('.username-or-icon').find('span').eq(1).should('have.text', invalidEmail);
cy.get('.InviteResultRow').find('.reason').should('have.text', inviteFailedMessage);
});
});
function inviteNewMemberToTeam(email) {
cy.wait(TIMEOUTS.HALF_SEC);
cy.findByRole('combobox', {name: 'Invite People'}).
typeWithForce(email).
wait(TIMEOUTS.HALF_SEC).
typeWithForce('{enter}');
cy.findByTestId('inviteButton').click();
}
});

View file

@ -156,7 +156,18 @@ Before running tests, a Mattermost server must be available. Two options:
1. **Page Object Pattern**: Always use page/component objects from the library. No static UI selectors should be in test files.
2. **Visual Testing**: For visual tests:
2. **Locator Priority**: Follow the Playwright recommended locator strategy (see [Playwright Locators Quick Guide](https://playwright.dev/docs/locators#quick-guide)). Use locators in this priority order:
1. `getByRole()` - Preferred. Locates by accessibility role and accessible name (e.g., `getByRole('button', {name: 'Submit'})`).
2. `getByText()` - Locates by visible text content.
3. `getByLabel()` - Locates form controls by their associated label text.
4. `getByPlaceholder()` - Locates inputs by placeholder text.
5. `getByAltText()` - Locates elements (usually images) by alt text.
6. `getByTitle()` - Locates by the `title` attribute.
7. `getByTestId()` - Last resort. Locates by `data-testid` attribute.
- **Avoid** CSS selectors (`.class`, `#id`), XPath, and raw `locator()` calls unless none of the above locators can identify the element.
- Use `{exact: true}` when the accessible name might partially match other elements (e.g., `getByRole('button', {name: 'Invite', exact: true})`).
3. **Visual Testing**: For visual tests:
- Place all visual tests in the `specs/visual/` directory
- Always include the `@visual` tag in the test tags array
- Run via Docker container for consistency to maintain screenshot integrity
@ -167,14 +178,14 @@ Before running tests, a Mattermost server must be available. Two options:
- Tests should only be run inside the Playwright Docker container
- Follow the visual test documentation format like other tests, with proper JSDoc and comments
3. **Test Title Validation with Claude Code**: When using Claude:
4. **Test Title Validation with Claude Code**: When using Claude:
- Run `claude spec/path/to/file.spec.ts` to check your test file
- Ask: "Check if test titles follow the format in CLAUDE.md"
- Claude will analyze each test title and suggest improvements
- Format should be action-oriented, feature-specific, context-aware, and outcome-focused
- Example: `creates scheduled message from channel and posts at scheduled time`
4. **Test Structure**:
5. **Test Structure**:
- Use descriptive test titles that follow this format:
- **Action-oriented**: Start with a verb that describes the main action
- **Feature-specific**: Include the feature or component being tested
@ -194,7 +205,7 @@ Before running tests, a Mattermost server must be available. Two options:
- Keep tests independent and isolated
- Use tags to categorize tests with `{tag: '@feature_name'}`
5. **Test Documentation Format**:
6. **Test Documentation Format**:
- Include JSDoc-style documentation before each test:
```typescript
/**
@ -231,12 +242,12 @@ Before running tests, a Mattermost server must be available. Two options:
- `// # descriptive action` - Comments that describe steps being taken (e.g., `// # Initialize user and login`)
- `// * descriptive verification` - Comments that describe assertions/checks (e.g., `// * Verify message appears in channel`)
6. **Browser Compatibility**:
7. **Browser Compatibility**:
- Tests run on Chrome, Firefox, and iPad by default
- Consider browser-specific behaviors for certain features
- Use `test.skip()` for browser-specific limitations
7. **Test Documentation Linting**:
8. **Test Documentation Linting**:
- Run `npm run lint:test-docs` to verify all spec files follow the documentation format
- The linter checks for proper JSDoc tags, test titles, feature tags, and action/verification comments
- This is also included in the standard `npm run check` command

View file

@ -111,6 +111,7 @@ const defaultServerConfig: AdminConfig = {
GoroutineHealthThreshold: -1,
EnableOAuthServiceProvider: true,
EnableDynamicClientRegistration: false,
DCRRedirectURIAllowlist: [],
EnableIncomingWebhooks: true,
EnableOutgoingWebhooks: true,
EnableOutgoingOAuthConnections: false,

View file

@ -6,11 +6,43 @@ import {Locator, expect} from '@playwright/test';
/**
 * Page object for the "Invite People" modal opened from the team menu.
 */
export default class InvitePeopleModal {
    readonly container: Locator;
    readonly closeButton: Locator;
    readonly inviteInput: Locator;
    readonly inviteButton: Locator;
    readonly copyInviteLinkButton: Locator;

    constructor(container: Locator) {
        this.container = container;
        this.closeButton = container.getByRole('button', {name: 'Close'});
        this.inviteInput = container.getByRole('combobox', {name: 'Invite People'});
        this.inviteButton = container.getByRole('button', {name: 'Invite', exact: true});
        this.copyInviteLinkButton = container.getByText('Copy invite link');
    }

    // Asserts the modal is on screen.
    async toBeVisible() {
        await expect(this.container).toBeVisible();
    }

    // Dismisses the modal via its close button.
    async close() {
        await this.closeButton.click();
    }

    /**
     * Types an email or username into the react-select invite input, waits
     * until the dropdown offers a selectable option, picks it with Enter,
     * and submits the invite.
     */
    async inviteByEmail(email: string) {
        await expect(this.inviteInput).toBeVisible();
        await this.inviteInput.click();
        await this.inviteInput.pressSequentially(email, {delay: 50});

        // react-select loads options asynchronously; wait for the first
        // selectable option before committing the selection.
        const firstOption = this.container.getByRole('listbox').getByRole('option').first();
        await expect(firstOption).toBeVisible({timeout: 5000});

        await this.inviteInput.press('Enter');
        await expect(this.inviteButton).toBeEnabled();
        await this.inviteButton.click();
    }
}

View file

@ -0,0 +1,56 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {Locator, expect} from '@playwright/test';
/**
 * Page object for the confirmation screen shown after submitting team
 * invites, listing which invites were sent and which were not.
 */
export default class MembersInvitedModal {
    readonly container: Locator;
    readonly doneButton: Locator;
    readonly inviteMoreButton: Locator;
    readonly sentSection: Locator;
    readonly notSentSection: Locator;

    constructor(container: Locator) {
        this.container = container;
        this.doneButton = container.getByRole('button', {name: 'Done'});
        this.inviteMoreButton = container.getByRole('button', {name: 'Invite More People'});
        this.sentSection = container.locator('.invitation-modal-confirm--sent');
        this.notSentSection = container.locator('.invitation-modal-confirm--not-sent');
    }

    // Asserts the modal is on screen.
    async toBeVisible() {
        await expect(this.container).toBeVisible();
    }

    // Dismisses the modal via the Done button.
    async close() {
        await this.doneButton.click();
    }

    /**
     * Returns the result reason text for the first sent invite row.
     * Narrowed with .first() so the lookup does not trip Playwright's
     * strict-mode check when several invites were submitted at once.
     */
    async getSentResultReason(): Promise<string> {
        await expect(this.sentSection).toBeVisible();
        return (await this.sentSection.locator('.InviteResultRow .reason').first().textContent()) ?? '';
    }

    /**
     * Returns the result reason text for the first not-sent invite row.
     * Narrowed with .first() for the same strict-mode reason as above.
     */
    async getNotSentResultReason(): Promise<string> {
        await expect(this.notSentSection).toBeVisible();
        return (await this.notSentSection.locator('.InviteResultRow .reason').first().textContent()) ?? '';
    }

    /**
     * Clicks the "Invite More People" button to return to the invite form.
     */
    async clickInviteMore() {
        await expect(this.inviteMoreButton).toBeVisible();
        await this.inviteMoreButton.click();
    }
}

View file

@ -0,0 +1,55 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {Locator, expect} from '@playwright/test';
/**
 * Page object for the Access tab of the Team Settings modal.
 */
export default class AccessSettings {
    readonly container: Locator;
    readonly allowedDomainsCheckbox;
    readonly allowedDomainsInput;
    readonly allowOpenInviteCheckbox;
    readonly regenerateButton;

    constructor(container: Locator) {
        this.container = container;
        this.allowedDomainsCheckbox = container.locator('input[name="showAllowedDomains"]');
        this.allowedDomainsInput = container.locator('#allowedDomains input');
        this.allowOpenInviteCheckbox = container.locator('input[name="allowOpenInvite"]');
        this.regenerateButton = container.locator('button[data-testid="regenerateButton"]');
    }

    // Asserts the settings panel is on screen.
    async toBeVisible() {
        await expect(this.container).toBeVisible();
    }

    // Turns on the "allowed email domains" option; no-op when it is
    // already enabled.
    async enableAllowedDomains() {
        if (await this.allowedDomainsCheckbox.isChecked()) {
            return;
        }
        await this.allowedDomainsCheckbox.check();
    }

    // Adds a domain by typing it and committing with Enter.
    async addDomain(domain: string) {
        await expect(this.allowedDomainsInput).toBeVisible();
        await this.allowedDomainsInput.fill(domain);
        await this.allowedDomainsInput.press('Enter');
    }

    // Removes a previously added domain via its remove control.
    async removeDomain(domain: string) {
        const removeControl = this.container.locator(`div[role="button"][aria-label*="Remove ${domain}"]`);
        await expect(removeControl).toBeVisible();
        await removeControl.click();
    }

    // Flips the "allow open invite" setting to its opposite state.
    async toggleOpenInvite() {
        await expect(this.allowOpenInviteCheckbox).toBeVisible();
        await this.allowOpenInviteCheckbox.click();
    }

    // Requests a fresh team invite ID.
    async regenerateInviteId() {
        await expect(this.regenerateButton).toBeVisible();
        await this.regenerateButton.click();
    }
}

View file

@ -0,0 +1,53 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {Locator, expect} from '@playwright/test';
/**
 * Page object for the Info tab of the Team Settings modal (name,
 * description, and team icon).
 */
export default class InfoSettings {
    readonly container: Locator;
    readonly nameInput;
    readonly descriptionInput;
    readonly uploadInput;
    readonly removeImageButton;
    readonly teamIconImage;
    readonly teamIconInitial;

    constructor(container: Locator) {
        this.container = container;
        this.nameInput = container.locator('input#teamName');
        this.descriptionInput = container.locator('textarea#teamDescription');
        this.uploadInput = container.locator('input[data-testid="uploadPicture"]');
        this.removeImageButton = container.locator('button[data-testid="removeImageButton"]');
        this.teamIconImage = container.locator('#teamIconImage');
        this.teamIconInitial = container.locator('#teamIconInitial');
    }

    // Asserts the settings panel is on screen.
    async toBeVisible() {
        await expect(this.container).toBeVisible();
    }

    // Replaces the team name with the given value.
    async updateName(name: string) {
        await expect(this.nameInput).toBeVisible();
        await this.nameInput.clear();
        await this.nameInput.fill(name);
    }

    // Replaces the team description with the given value.
    async updateDescription(description: string) {
        await expect(this.descriptionInput).toBeVisible();
        await this.descriptionInput.clear();
        await this.descriptionInput.fill(description);
    }

    // Uploads an icon file and waits for the image preview to appear.
    async uploadIcon(filePath: string) {
        await this.uploadInput.setInputFiles(filePath);
        await expect(this.teamIconImage).toBeVisible();
    }

    // Removes the icon and waits for the initials placeholder to return.
    async removeIcon() {
        await expect(this.removeImageButton).toBeVisible();
        await this.removeImageButton.click();
        await expect(this.teamIconInitial).toBeVisible();
    }
}

View file

@ -3,14 +3,77 @@
import {Locator, expect} from '@playwright/test';
import InfoSettings from './info_settings';
import AccessSettings from './access_settings';
/**
 * Page object for the Team Settings modal, exposing its Info and Access
 * tabs plus the shared save/undo panel.
 */
export default class TeamSettingsModal {
    readonly container: Locator;
    readonly closeButton;
    readonly infoTab;
    readonly accessTab;
    readonly saveButton;
    readonly undoButton;
    readonly infoSettings;
    readonly accessSettings;

    constructor(container: Locator) {
        this.container = container;
        this.closeButton = container.locator('.modal-header button.close').first();
        this.infoTab = container.locator('[data-testid="info-tab-button"]');
        this.accessTab = container.locator('[data-testid="access-tab-button"]');
        this.saveButton = container.locator('button[data-testid="SaveChangesPanel__save-btn"]');
        this.undoButton = container.locator('button[data-testid="SaveChangesPanel__cancel-btn"]');
        this.infoSettings = new InfoSettings(container);
        this.accessSettings = new AccessSettings(container);
    }

    // Asserts the modal is on screen.
    async toBeVisible() {
        await expect(this.container).toBeVisible();
    }

    // Dismisses the modal via its header close button.
    async close() {
        await this.closeButton.click();
    }

    // Switches to the Info tab and returns its page object.
    async openInfoTab(): Promise<InfoSettings> {
        await expect(this.infoTab).toBeVisible();
        await this.infoTab.click();
        return this.infoSettings;
    }

    // Switches to the Access tab and returns its page object.
    async openAccessTab(): Promise<AccessSettings> {
        await expect(this.accessTab).toBeVisible();
        await this.accessTab.click();
        return this.accessSettings;
    }

    // Commits pending changes via the save panel.
    async save() {
        await expect(this.saveButton).toBeVisible();
        await this.saveButton.click();
    }

    // Discards pending changes via the save panel.
    async undo() {
        await expect(this.undoButton).toBeVisible();
        await this.undoButton.click();
    }

    // Waits for the "Settings saved" confirmation to show up.
    async verifySavedMessage() {
        const confirmation = this.container.getByText('Settings saved');
        await expect(confirmation).toBeVisible({timeout: 5000});
    }

    // Waits for the unsaved-changes warning banner to show up.
    async verifyUnsavedChanges() {
        const unsavedBanner = this.container.locator('.SaveChangesPanel:has-text("You have unsaved changes")');
        await expect(unsavedBanner).toBeVisible({timeout: 3000});
    }
}

View file

@ -12,6 +12,7 @@ import ChannelSettingsModal from './channels/channel_settings/channel_settings_m
import DeletePostModal from './channels/delete_post_modal';
import FindChannelsModal from './channels/find_channels_modal';
import InvitePeopleModal from './channels/invite_people_modal';
import MembersInvitedModal from './channels/members_invited_modal';
import SettingsModal from './channels/settings/settings_modal';
import Footer from './footer';
import GlobalHeader from './global_header';
@ -69,6 +70,7 @@ const components = {
DeletePostModal,
DeleteScheduledPostModal,
InvitePeopleModal,
MembersInvitedModal,
SettingsModal,
PostDotMenu,
PostMenu,
@ -122,6 +124,7 @@ export {
DeletePostModal,
DeleteScheduledPostModal,
InvitePeopleModal,
MembersInvitedModal,
SettingsModal,
PostDotMenu,
PostMenu,

View file

@ -4,7 +4,15 @@
import {expect, Page} from '@playwright/test';
import {waitUntil} from 'async-wait-until';
import {ChannelsPost, ChannelSettingsModal, SettingsModal, components, InvitePeopleModal} from '@/ui/components';
import {
ChannelsPost,
ChannelSettingsModal,
MembersInvitedModal,
SettingsModal,
TeamSettingsModal,
components,
InvitePeopleModal,
} from '@/ui/components';
import {duration} from '@/util';
export default class ChannelsPage {
readonly channels = 'Channels';
@ -25,6 +33,7 @@ export default class ChannelsPage {
readonly deletePostModal;
readonly findChannelsModal;
public invitePeopleModal: InvitePeopleModal | undefined;
public membersInvitedModal: MembersInvitedModal | undefined;
readonly profileModal;
readonly settingsModal;
readonly teamSettingsModal;
@ -105,6 +114,13 @@ export default class ChannelsPage {
return this.invitePeopleModal;
}
async getMembersInvitedModal(teamDisplayName: string) {
this.membersInvitedModal = new components.MembersInvitedModal(
this.page.getByRole('dialog', {name: `invited to ${teamDisplayName}`}),
);
return this.membersInvitedModal;
}
async goto(teamName = '', channelName = '') {
let channelsUrl = '/';
if (teamName) {
@ -152,6 +168,14 @@ export default class ChannelsPage {
return {rootPost, sidebarRight, lastPost};
}
async openTeamSettings(): Promise<TeamSettingsModal> {
await this.page.locator('#sidebarTeamMenuButton').click();
await this.page.getByText('Team settings').first().click();
await this.teamSettingsModal.toBeVisible();
return this.teamSettingsModal;
}
async openChannelSettings(): Promise<ChannelSettingsModal> {
await this.centerView.header.openChannelMenu();
await this.page.locator('#channelSettings[role="menuitem"]').click();

View file

@ -0,0 +1,13 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
// Configuration for merging sharded blob reports via:
// npx playwright merge-reports --config merge.config.mjs ./all-blob-reports/
// Reporter set producing a single HTML/JSON/JUnit output when the sharded
// blob reports are merged.
const reporter = [
    ['html', {open: 'never', outputFolder: './results/reporter'}],
    ['json', {outputFile: './results/reporter/results.json'}],
    ['junit', {outputFile: './results/reporter/results.xml'}],
];

export default {reporter};

View file

@ -75,6 +75,7 @@ export default defineConfig({
},
],
reporter: [
...(testConfig.isCI ? [['blob', {outputDir: './results/blob-report'}] as const] : []),
['html', {open: 'never', outputFolder: './results/reporter'}],
['json', {outputFile: './results/reporter/results.json'}],
['junit', {outputFile: './results/reporter/results.xml'}],

View file

@ -69,16 +69,16 @@ test('Post actions tab support', async ({pw, axe}) => {
await channelsPage.postDotMenu.followMessageMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.markAsUnreadMenuItem).toBeFocused();
// * Should move focus to Remind after arrow down
await channelsPage.postDotMenu.markAsUnreadMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.remindMenuItem).toBeFocused();
// * Should move focus to Save after arrow down
await channelsPage.postDotMenu.remindMenuItem.press('ArrowDown');
await channelsPage.postDotMenu.markAsUnreadMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.saveMenuItem).toBeFocused();
// * Should move focus to Pin to Channel after arrow down
// * Should move focus to Remind after arrow down
await channelsPage.postDotMenu.saveMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.remindMenuItem).toBeFocused();
// * Should move focus to Pin to Channel after arrow down
await channelsPage.postDotMenu.remindMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.pinToChannelMenuItem).toBeFocused();
if (config.FeatureFlags['MoveThreadsEnabled'] && license.IsLicensed === 'true') {
@ -86,25 +86,25 @@ test('Post actions tab support', async ({pw, axe}) => {
await channelsPage.postDotMenu.pinToChannelMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.moveThreadMenuItem).toBeFocused();
// * Should move focus to Copy Link after arrow down
// * Should move focus to Copy Text after arrow down
await channelsPage.postDotMenu.moveThreadMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.copyLinkMenuItem).toBeFocused();
await expect(channelsPage.postDotMenu.copyTextMenuItem).toBeFocused();
} else {
// * Should move focus to Copy Link after arrow down
// * Should move focus to Copy Text after arrow down
await channelsPage.postDotMenu.pinToChannelMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.copyLinkMenuItem).toBeFocused();
await expect(channelsPage.postDotMenu.copyTextMenuItem).toBeFocused();
}
// * Should move focus to Copy Link after arrow down
await channelsPage.postDotMenu.copyTextMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.copyLinkMenuItem).toBeFocused();
// * Should move focus to Edit after arrow down
await channelsPage.postDotMenu.copyLinkMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.editMenuItem).toBeFocused();
// * Should move focus to Copy Text after arrow down
await channelsPage.postDotMenu.editMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.copyTextMenuItem).toBeFocused();
// * Should move focus to Delete after arrow down
await channelsPage.postDotMenu.copyTextMenuItem.press('ArrowDown');
await channelsPage.postDotMenu.editMenuItem.press('ArrowDown');
await expect(channelsPage.postDotMenu.deleteMenuItem).toBeFocused();
// * Then, should move focus back to Reply after arrow down

View file

@ -0,0 +1,77 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import {ChannelsPage, expect, test} from '@mattermost/playwright-lib';
/**
 * @objective Verify that a user with a valid email domain can be invited to a closed team,
 * and a user with an invalid email domain is rejected with the correct error message.
 */
test('MM-T388 Invite new user to closed team with email domain restriction', {tag: '@team_settings'}, async ({pw}) => {
// NOTE(review): this must match the domain pw.random.user() assigns to
// generated test users (see the createUser comment below) — confirm if the
// fixture's default domain ever changes.
const emailDomain = 'sample.mattermost.com';
// # Set up admin user and team
const {adminUser, adminClient, team} = await pw.initSetup();
// # Enable email invitations so the invite modal shows "Add email" option
await adminClient.patchConfig({
ServiceSettings: {EnableEmailInvitations: true},
});
// # Create a new user NOT on the team (default email is @sample.mattermost.com)
const newUser = await adminClient.createUser(await pw.random.user(), '', '');
const {page} = await pw.testBrowser.login(adminUser);
const channelsPage = new ChannelsPage(page);
// # Navigate to team
await channelsPage.goto(team.name);
// NOTE(review): 'networkidle' waits are discouraged by Playwright and can be
// flaky on chatty pages — consider a locator-based wait instead.
await page.waitForLoadState('networkidle');
// # Open Team Settings Modal and go to Access tab
const teamSettings = await channelsPage.openTeamSettings();
const accessSettings = await teamSettings.openAccessTab();
// # Enable "Allow only users with a specific email domain" and add the domain
await accessSettings.enableAllowedDomains();
await accessSettings.addDomain(emailDomain);
// # Save changes
await teamSettings.save();
await teamSettings.verifySavedMessage();
// # Close the Team Settings modal and wait for it to disappear
await teamSettings.close();
await expect(teamSettings.container).not.toBeVisible();
// # Open team menu and click 'Invite People'
await channelsPage.sidebarLeft.teamMenuButton.click();
await channelsPage.teamMenu.toBeVisible();
await channelsPage.teamMenu.clickInvitePeople();
// # Get the invite people modal and invite user with valid email domain
const inviteModal = await channelsPage.getInvitePeopleModal(team.display_name);
await inviteModal.toBeVisible();
await inviteModal.inviteByEmail(newUser.email);
// * Verify that the user has been successfully invited to the team
const membersInvitedModal = await channelsPage.getMembersInvitedModal(team.display_name);
await membersInvitedModal.toBeVisible();
const sentReason = await membersInvitedModal.getSentResultReason();
expect(sentReason).toBe('This member has been added to the team.');
// # Click 'Invite More People' to return to the invite form
await membersInvitedModal.clickInviteMore();
// # Invite a user with an invalid email domain (not sample.mattermost.com)
const invalidEmail = `user.${await pw.random.id()}@invalid.com`;
await inviteModal.inviteByEmail(invalidEmail);
// * Verify that the invite failed with the correct domain restriction error
const membersInvitedModal2 = await channelsPage.getMembersInvitedModal(team.display_name);
await membersInvitedModal2.toBeVisible();
const notSentReason = await membersInvitedModal2.getNotSentResultReason();
expect(notSentReason).toContain(
`The following email addresses do not belong to an accepted domain: ${invalidEmail}.`,
);
});

View file

@ -1 +1 @@
1.24.11
1.24.13

View file

@ -154,8 +154,8 @@ TEMPLATES_DIR=templates
# Plugins Packages
PLUGIN_PACKAGES ?= $(PLUGIN_PACKAGES:)
PLUGIN_PACKAGES += mattermost-plugin-calls-v1.11.0
PLUGIN_PACKAGES += mattermost-plugin-github-v2.5.0
PLUGIN_PACKAGES += mattermost-plugin-calls-v1.11.1
PLUGIN_PACKAGES += mattermost-plugin-github-v2.6.0
PLUGIN_PACKAGES += mattermost-plugin-gitlab-v1.12.0
PLUGIN_PACKAGES += mattermost-plugin-jira-v4.5.1
PLUGIN_PACKAGES += mattermost-plugin-playbooks-v2.7.0
@ -174,7 +174,7 @@ PLUGIN_PACKAGES += mattermost-plugin-channel-export-v1.3.0
# download the package from to work. This will no longer be needed when we unify
# the way we pre-package FIPS and non-FIPS plugins.
ifeq ($(FIPS_ENABLED),true)
PLUGIN_PACKAGES = mattermost-plugin-playbooks-v2.6.2%2Bb8f2bd9-fips
PLUGIN_PACKAGES = mattermost-plugin-playbooks-v2.7.0%2B1031c5e-fips
PLUGIN_PACKAGES += mattermost-plugin-agents-v1.7.2%2B866e2dd-fips
PLUGIN_PACKAGES += mattermost-plugin-boards-v9.2.2%2B4282c63-fips
endif

View file

@ -1,4 +1,4 @@
FROM mattermost/golang-bullseye:1.24.11@sha256:648e6d4bd76751787cf8eb2674942f931a01043872ce15ac9501382dabcefbe8
FROM mattermost/golang-bullseye:1.24.13@sha256:d9d9a35369413840836f677db08beb0aec784a966fe2a1ba1e60dc9baa64e881
ARG NODE_VERSION=20.11.1
RUN apt-get update && apt-get install -y make git apt-transport-https ca-certificates curl software-properties-common build-essential zip xmlsec1 jq pgloader gnupg

View file

@ -1,4 +1,4 @@
FROM cgr.dev/mattermost.com/go-msft-fips:1.24.11-dev@sha256:181a7db41bbff8cf0e522bd5f951a44f2a39a5f58ca930930dfbecdc6b690272
FROM cgr.dev/mattermost.com/go-msft-fips:1.24.13-dev@sha256:46c7f9e469ab1c83a7c1f3d1dfdf9f0aee7ef8a1c93d39a2270af3560b4008b4
ARG NODE_VERSION=20.11.1
RUN apk add curl ca-certificates mailcap unrtf wv poppler-utils tzdata gpg xmlsec

View file

@ -220,6 +220,10 @@ func (api *API) APILocal(h handlerFunc, opts ...APIHandlerOption) http.Handler {
}
func (api *API) RateLimitedHandler(apiHandler http.Handler, settings model.RateLimitSettings) http.Handler {
if !*api.srv.Config().RateLimitSettings.Enable {
return apiHandler
}
settings.SetDefaults()
rateLimiter, err := app.NewRateLimiter(&settings, []string{})

View file

@ -16,6 +16,7 @@ import (
func (api *API) InitLicenseLocal() {
api.BaseRoutes.APIRoot.Handle("/license", api.APILocal(localAddLicense, handlerParamFileAPI)).Methods(http.MethodPost)
api.BaseRoutes.APIRoot.Handle("/license", api.APILocal(localRemoveLicense)).Methods(http.MethodDelete)
api.BaseRoutes.APIRoot.Handle("/license/client", api.APILocal(localGetClientLicense)).Methods(http.MethodGet)
}
func localAddLicense(c *Context, w http.ResponseWriter, r *http.Request) {
@ -94,3 +95,23 @@ func localRemoveLicense(c *Context, w http.ResponseWriter, r *http.Request) {
ReturnStatusOK(w)
}
// localGetClientLicense writes the client-visible license fields as JSON.
// The "format" query parameter is required and only "old" is accepted; a
// missing or unrecognized value is rejected with a 400 response.
func localGetClientLicense(c *Context, w http.ResponseWriter, r *http.Request) {
	format := r.URL.Query().Get("format")

	switch {
	case format == "":
		c.Err = model.NewAppError("localGetClientLicense", "api.license.client.old_format.app_error", nil, "", http.StatusBadRequest)
		return
	case format != "old":
		c.SetInvalidParam("format")
		return
	}

	clientLicense := c.App.Srv().ClientLicense()
	if _, err := w.Write([]byte(model.MapToJSON(clientLicense))); err != nil {
		c.Logger.Warn("Error while writing response", mlog.Err(err))
	}
}

View file

@ -401,6 +401,22 @@ func registerOAuthClient(c *Context, w http.ResponseWriter, r *http.Request) {
return
}
// Enforce DCR redirect URI allowlist if configured
allowlist := c.App.Config().ServiceSettings.DCRRedirectURIAllowlist
if len(allowlist) > 0 {
for _, uri := range clientRequest.RedirectURIs {
if !model.RedirectURIMatchesAllowlist(uri, allowlist) {
dcrError := model.NewDCRError(model.DCRErrorInvalidRedirectURI, "One or more redirect URIs do not match the allowlist")
w.WriteHeader(http.StatusBadRequest)
if err := json.NewEncoder(w).Encode(dcrError); err != nil {
c.Logger.Warn("Error while writing response", mlog.Err(err))
}
return
}
}
}
// No user ID for DCR
userID := ""

View file

@ -4,7 +4,9 @@
package api4
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"os"
@ -779,6 +781,111 @@ func TestRegisterOAuthClient_DisabledFeatures(t *testing.T) {
CheckBadRequestStatus(t, resp)
}
// TestRegisterOAuthClient_RedirectURIAllowlist covers enforcement of
// ServiceSettings.DCRRedirectURIAllowlist during dynamic client registration:
// an empty allowlist permits any redirect URI, wildcard patterns match as
// expected, and any URI outside the allowlist rejects the whole registration
// with a DCR invalid_redirect_uri error.
func TestRegisterOAuthClient_RedirectURIAllowlist(t *testing.T) {
	mainHelper.Parallel(t)
	th := Setup(t)
	client := th.Client

	th.App.UpdateConfig(func(cfg *model.Config) {
		*cfg.ServiceSettings.EnableOAuthServiceProvider = true
		cfg.ServiceSettings.EnableDynamicClientRegistration = model.NewPointer(true)
	})

	// setAllowlist swaps in a new DCR redirect URI allowlist. Mutating only
	// through UpdateConfig avoids writing to the shared config snapshot that
	// th.App.Config() returns, which other goroutines may be reading.
	setAllowlist := func(allowlist []string) {
		th.App.UpdateConfig(func(cfg *model.Config) {
			cfg.ServiceSettings.DCRRedirectURIAllowlist = allowlist
		})
	}

	// postRawRegistration issues the DCR request without the typed client so
	// tests can inspect the raw status code and DCR error body.
	postRawRegistration := func(t *testing.T, request *model.ClientRegistrationRequest) *http.Response {
		t.Helper()
		body, err := json.Marshal(request)
		require.NoError(t, err)
		req, err := http.NewRequest(http.MethodPost, client.APIURL+"/oauth/apps/register", bytes.NewReader(body))
		require.NoError(t, err)
		req.Header.Set("Content-Type", "application/json")
		if client.AuthToken != "" {
			req.Header.Set(model.HeaderAuth, model.HeaderBearer+" "+client.AuthToken)
		}
		httpResp, err := client.HTTPClient.Do(req)
		require.NoError(t, err)
		return httpResp
	}

	// requireInvalidRedirectURIError asserts a 400 response carrying a DCR
	// invalid_redirect_uri error with a non-empty description, closing the body.
	requireInvalidRedirectURIError := func(t *testing.T, httpResp *http.Response) {
		t.Helper()
		defer httpResp.Body.Close()
		require.Equal(t, http.StatusBadRequest, httpResp.StatusCode)
		var dcrErr model.DCRError
		require.NoError(t, json.NewDecoder(httpResp.Body).Decode(&dcrErr))
		assert.Equal(t, model.DCRErrorInvalidRedirectURI, dcrErr.Error)
		assert.NotEmpty(t, dcrErr.ErrorDescription)
	}

	t.Run("allowlist empty registration succeeds", func(t *testing.T) {
		setAllowlist([]string{})
		request := &model.ClientRegistrationRequest{
			RedirectURIs: []string{"https://example.com/callback"},
			ClientName:   model.NewPointer("Test Client"),
		}
		response, resp, err := client.RegisterOAuthClient(context.Background(), request)
		require.NoError(t, err)
		CheckCreatedStatus(t, resp)
		require.NotNil(t, response)
		assert.NotEmpty(t, response.ClientID)
	})

	t.Run("wildcard allowed URI succeeds", func(t *testing.T) {
		setAllowlist([]string{"https://example.com/*", "https://*.test.com/**"})
		request := &model.ClientRegistrationRequest{
			RedirectURIs: []string{"https://example.com/callback"},
			ClientName:   model.NewPointer("Test Client"),
		}
		response, resp, err := client.RegisterOAuthClient(context.Background(), request)
		require.NoError(t, err)
		CheckCreatedStatus(t, resp)
		require.NotNil(t, response)

		time.Sleep(time.Second) // avoid rate limit

		request2 := &model.ClientRegistrationRequest{
			RedirectURIs: []string{"https://app.test.com/deep/path/cb"},
			ClientName:   model.NewPointer("Test Client 2"),
		}
		response2, resp2, err2 := client.RegisterOAuthClient(context.Background(), request2)
		require.NoError(t, err2)
		CheckCreatedStatus(t, resp2)
		require.NotNil(t, response2)
	})

	t.Run("disallowed URI returns 400 invalid_redirect_uri", func(t *testing.T) {
		setAllowlist([]string{"https://allowed.com/**"})
		httpResp := postRawRegistration(t, &model.ClientRegistrationRequest{
			RedirectURIs: []string{"https://disallowed.com/callback"},
			ClientName:   model.NewPointer("Test Client"),
		})
		requireInvalidRedirectURIError(t, httpResp)
	})

	t.Run("multi redirect partial mismatch rejects request", func(t *testing.T) {
		setAllowlist([]string{"https://allowed.com/**"})
		time.Sleep(time.Second) // avoid rate limit
		httpResp := postRawRegistration(t, &model.ClientRegistrationRequest{
			RedirectURIs: []string{"https://allowed.com/cb1", "https://disallowed.com/cb2"},
			ClientName:   model.NewPointer("Test Client"),
		})
		requireInvalidRedirectURIError(t, httpResp)
	})
}
func TestRegisterOAuthClient_PublicClient_Success(t *testing.T) {
// Test successful public client DCR registration
mainHelper.Parallel(t)

View file

@ -81,6 +81,11 @@ func createPostChecks(where string, c *Context, post *model.Post) {
}
postPriorityCheckWithContext(where, c, post.GetPriority(), post.RootId)
if c.Err != nil {
return
}
postBurnOnReadCheckWithContext(where, c, post, nil)
}
func createPost(c *Context, w http.ResponseWriter, r *http.Request) {

View file

@ -42,6 +42,14 @@ func postPriorityCheckWithContext(where string, c *Context, priority *model.Post
}
}
// postBurnOnReadCheckWithContext validates burn-on-read restrictions for the
// given post by delegating to app.PostBurnOnReadCheckWithApp with the post's
// user, channel, and type. channel is forwarded as-is and may be nil
// (callers in this file pass nil — presumably the app layer resolves it from
// post.ChannelId; verify against the app implementation). Any failure is
// recorded on c.Err so the surrounding handler can abort.
func postBurnOnReadCheckWithContext(where string, c *Context, post *model.Post, channel *model.Channel) {
appErr := app.PostBurnOnReadCheckWithApp(where, c.App, c.AppContext, post.UserId, post.ChannelId, post.Type, channel)
if appErr != nil {
// Stamp the caller-supplied location so the error is attributed to the
// originating API handler rather than the shared app helper.
appErr.Where = where
c.Err = appErr
}
}
// checkUploadFilePermissionForNewFiles checks upload_file permission only when
// adding new files to a post, preventing permission bypass via cross-channel file attachments.
func checkUploadFilePermissionForNewFiles(c *Context, newFileIds []string, originalPost *model.Post) {

View file

@ -40,6 +40,17 @@ func scheduledPostChecks(where string, c *Context, scheduledPost *model.Schedule
}
postPriorityCheckWithContext(where, c, scheduledPost.GetPriority(), scheduledPost.RootId)
if c.Err != nil {
return
}
// Validate burn-on-read restrictions for scheduled post
post := &model.Post{
ChannelId: scheduledPost.ChannelId,
UserId: scheduledPost.UserId,
Type: scheduledPost.Type,
}
postBurnOnReadCheckWithContext(where, c, post, nil)
}
func requireScheduledPostsEnabled(c *Context) {

View file

@ -66,7 +66,7 @@ func (api *API) InitUser() {
api.BaseRoutes.User.Handle("/mfa", api.APISessionRequiredMfa(updateUserMfa)).Methods(http.MethodPut)
api.BaseRoutes.User.Handle("/mfa/generate", api.APISessionRequiredMfa(generateMfaSecret)).Methods(http.MethodPost)
api.BaseRoutes.Users.Handle("/login", api.APIHandler(login)).Methods(http.MethodPost)
api.BaseRoutes.Users.Handle("/login", api.RateLimitedHandler(api.APIHandler(login), model.RateLimitSettings{PerSec: model.NewPointer(5), MaxBurst: model.NewPointer(10)})).Methods(http.MethodPost)
api.BaseRoutes.Users.Handle("/login/sso/code-exchange", api.APIHandler(loginSSOCodeExchange)).Methods(http.MethodPost)
api.BaseRoutes.Users.Handle("/login/desktop_token", api.RateLimitedHandler(api.APIHandler(loginWithDesktopToken), model.RateLimitSettings{PerSec: model.NewPointer(2), MaxBurst: model.NewPointer(1)})).Methods(http.MethodPost)
api.BaseRoutes.Users.Handle("/login/switch", api.APIHandler(switchAccountType)).Methods(http.MethodPost)
@ -116,12 +116,28 @@ func (api *API) InitUser() {
api.BaseRoutes.Users.Handle("/trigger-notify-admin-posts", api.APISessionRequired(handleTriggerNotifyAdminPosts)).Methods(http.MethodPost)
}
// loginSSOCodeExchange exchanges a short-lived login_code for session tokens (mobile SAML code exchange)
// loginSSOCodeExchange exchanges a short-lived login_code for session tokens.
//
// Deprecated: This endpoint is deprecated and will be removed in a future release.
// Mobile clients should use the direct SSO callback flow instead.
func loginSSOCodeExchange(c *Context, w http.ResponseWriter, r *http.Request) {
// Set deprecation headers to inform clients
w.Header().Set("Deprecation", "true")
if !c.App.Config().FeatureFlags.MobileSSOCodeExchange {
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "feature disabled", http.StatusBadRequest)
c.Logger.Warn("Deprecated endpoint called",
mlog.String("endpoint", "/login/sso/code-exchange"),
mlog.String("status", "disabled"),
)
c.Err = model.NewAppError("loginSSOCodeExchange", "api.user.login_sso_code_exchange.deprecated.app_error", nil, "", http.StatusGone)
return
}
c.Logger.Warn("Deprecated endpoint called",
mlog.String("endpoint", "/login/sso/code-exchange"),
mlog.String("status", "enabled but deprecated"),
)
props := model.MapFromJSON(r.Body)
loginCode := props["login_code"]
codeVerifier := props["code_verifier"]

View file

@ -8524,15 +8524,34 @@ func TestLoginWithDesktopToken(t *testing.T) {
})
}
// TestLoginSSOCodeExchangeDeprecated verifies that the deprecated
// /users/login/sso/code-exchange endpoint responds with 410 Gone and a
// "Deprecation: true" header when the MobileSSOCodeExchange feature flag
// is disabled.
func TestLoginSSOCodeExchangeDeprecated(t *testing.T) {
mainHelper.Parallel(t)
th := SetupConfig(t, func(cfg *model.Config) {
cfg.FeatureFlags.MobileSSOCodeExchange = false
}).InitBasic(t)
// Body mirrors what a mobile client would send; the values themselves are
// irrelevant here because the handler rejects the request on the feature
// flag before reading the body.
props := map[string]string{
"login_code": "test_code",
"code_verifier": "test_verifier",
"state": "test_state",
}
resp, err := th.Client.DoAPIPost(context.Background(), "/users/login/sso/code-exchange", model.MapToJSON(props))
require.Error(t, err)
// * The endpoint is gone when the flag is off, and the standard
// Deprecation header is set regardless.
require.Equal(t, http.StatusGone, resp.StatusCode)
assert.Equal(t, "true", resp.Header.Get("Deprecation"))
}
// TestLoginSSOCodeExchange tests the code-exchange endpoint when enabled via feature flag.
// Note: This endpoint is deprecated and disabled by default. These tests verify behavior
// when explicitly enabled via feature flag (for backwards compatibility during rollout).
func TestLoginSSOCodeExchange(t *testing.T) {
mainHelper.Parallel(t)
th := Setup(t).InitBasic(t)
th := SetupConfig(t, func(cfg *model.Config) {
cfg.FeatureFlags.MobileSSOCodeExchange = true
}).InitBasic(t)
t.Run("wrong token type cannot be used for code exchange", func(t *testing.T) {
th.App.UpdateConfig(func(cfg *model.Config) {
cfg.FeatureFlags.MobileSSOCodeExchange = true
})
token := model.NewToken(model.TokenTypeOAuth, "extra-data")
require.NoError(t, th.App.Srv().Store().Token().Save(token))
defer func() {
@ -8551,10 +8570,6 @@ func TestLoginSSOCodeExchange(t *testing.T) {
})
t.Run("successful code exchange with S256 challenge", func(t *testing.T) {
th.App.UpdateConfig(func(cfg *model.Config) {
cfg.FeatureFlags.MobileSSOCodeExchange = true
})
samlUser := th.CreateUserWithAuth(t, model.UserAuthServiceSaml)
codeVerifier := "test_code_verifier_123456789"

View file

@ -551,7 +551,7 @@ func (a *App) PermanentDeleteFlaggedPost(rctx request.CTX, actionRequest *model.
if jsonErr != nil {
return model.NewAppError("PermanentlyRemoveFlaggedPost", "app.content_flagging.permanently_delete.marshal_comment.app_error", nil, "", http.StatusInternalServerError).Wrap(jsonErr)
}
// Storing marshalled content into RawMessage to ensure proper escaping of special characters and prevent
// Storing marshaled content into RawMessage to ensure proper escaping of special characters and prevent
// generating unsafe JSON values
commentJsonValue := json.RawMessage(commentBytes)
@ -565,61 +565,11 @@ func (a *App) PermanentDeleteFlaggedPost(rctx request.CTX, actionRequest *model.
return model.NewAppError("PermanentlyRemoveFlaggedPost", "api.content_flagging.error.post_not_in_progress", nil, "", http.StatusBadRequest)
}
editHistories, appErr := a.GetEditHistoryForPost(flaggedPost.Id)
if appErr != nil {
if appErr.StatusCode != http.StatusNotFound {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to get edit history for flaggedPost", mlog.Err(appErr), mlog.String("post_id", flaggedPost.Id))
}
}
for _, editHistory := range editHistories {
if filesDeleteAppErr := a.PermanentDeleteFilesByPost(rctx, editHistory.Id); filesDeleteAppErr != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to permanently delete files for one of the edit history posts", mlog.Err(filesDeleteAppErr), mlog.String("post_id", editHistory.Id))
}
if deletePostAppErr := a.PermanentDeletePost(rctx, editHistory.Id, reviewerId); deletePostAppErr != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to permanently delete one of the edit history posts", mlog.Err(deletePostAppErr), mlog.String("post_id", editHistory.Id))
}
}
if filesDeleteAppErr := a.PermanentDeleteFilesByPost(rctx, flaggedPost.Id); filesDeleteAppErr != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to permanently delete files for the flaggedPost", mlog.Err(filesDeleteAppErr), mlog.String("post_id", flaggedPost.Id))
}
if err := a.DeletePriorityForPost(flaggedPost.Id); err != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to delete flaggedPost priority for the flaggedPost", mlog.Err(err), mlog.String("post_id", flaggedPost.Id))
}
if err := a.Srv().Store().PostAcknowledgement().DeleteAllForPost(flaggedPost.Id); err != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to delete flaggedPost acknowledgements for the flaggedPost", mlog.Err(err), mlog.String("post_id", flaggedPost.Id))
}
if err := a.Srv().Store().Post().DeleteAllPostRemindersForPost(flaggedPost.Id); err != nil {
rctx.Logger().Error("PermanentlyRemoveFlaggedPost: Failed to delete flaggedPost reminders for the flaggedPost", mlog.Err(err), mlog.String("post_id", flaggedPost.Id))
}
scrubPost(flaggedPost)
_, err := a.Srv().Store().Post().Overwrite(rctx, flaggedPost)
if err != nil {
return model.NewAppError("PermanentlyRemoveFlaggedPost", "app.content_flagging.permanently_delete.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
contentReviewBot, appErr := a.getContentReviewBot(rctx)
appErr = a.PermanentDeletePostDataRetainStub(rctx, flaggedPost, reviewerId)
if appErr != nil {
return appErr
}
// If the post is not already deleted, delete it now.
// This handles the case when "Hide message from channel while it is being reviewed" setting is set to false when the post was flagged.
if flaggedPost.DeleteAt == 0 {
// DeletePost is called to care of WebSocket events, cache invalidation, search index removal,
// persistent notification removal and other cleanup tasks that need to happen on post deletion.
_, appErr = a.DeletePost(rctx, flaggedPost.Id, contentReviewBot.UserId)
if appErr != nil {
return appErr
}
}
groupId, appErr := a.ContentFlaggingGroupId()
if appErr != nil {
return appErr
@ -654,7 +604,7 @@ func (a *App) PermanentDeleteFlaggedPost(rctx request.CTX, actionRequest *model.
},
}
_, err = a.Srv().propertyAccessService.CreatePropertyValues(anonymousCallerId, propertyValues)
_, err := a.Srv().propertyAccessService.CreatePropertyValues(anonymousCallerId, propertyValues)
if err != nil {
return model.NewAppError("PermanentlyRemoveFlaggedPost", "app.content_flagging.create_property_values.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
@ -685,6 +635,69 @@ func (a *App) PermanentDeleteFlaggedPost(rctx request.CTX, actionRequest *model.
return nil
}
// PermanentDeletePostDataRetainStub permanently deletes all data associated
// with a post (edit-history posts, files, priority, acknowledgements,
// reminders, other associated rows) and scrubs the post's own content, while
// retaining the post row itself as a stub. Each cleanup step is best-effort:
// failures are logged and the remaining steps still run, so a single failing
// step does not abort the whole cleanup. Only an error from the final
// DeletePost call is returned to the caller.
func (a *App) PermanentDeletePostDataRetainStub(rctx request.CTX, post *model.Post, deleteByID string) *model.AppError {
	// when a post is removed, the following things need to be done
	// 1. Hard delete corresponding file infos - covered below
	// 2. Hard delete file infos associated to post's edit history - NA here;
	//    presumably handled by PermanentDeletePost on each history entry — TODO confirm
	// 3. Hard delete post's edit history - covered below
	// 4. Hard delete the files from file storage - covered below
	// 5. Hard delete post's priority data - covered below
	// 6. Hard delete post's post acknowledgements - covered below
	// 7. Hard delete post reminders - covered below
	// 8. Scrub the post's content - message, props - covered below
	editHistories, appErr := a.GetEditHistoryForPost(post.Id)
	if appErr != nil {
		// A NotFound simply means the post was never edited; anything else is
		// unexpected and worth logging, but we continue with the cleanup.
		if appErr.StatusCode != http.StatusNotFound {
			rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to get edit history for post", mlog.Err(appErr), mlog.String("post_id", post.Id))
		}
	}
	// Remove each edit-history post entirely; these are hard-deleted, not stubbed.
	for _, editHistory := range editHistories {
		if deletePostAppErr := a.PermanentDeletePost(rctx, editHistory.Id, deleteByID); deletePostAppErr != nil {
			rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to permanently delete one of the edit history posts", mlog.Err(deletePostAppErr), mlog.String("post_id", editHistory.Id))
		}
	}
	if filesDeleteAppErr := a.PermanentDeleteFilesByPost(rctx, post.Id); filesDeleteAppErr != nil {
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to permanently delete files for the post", mlog.Err(filesDeleteAppErr), mlog.String("post_id", post.Id))
	}
	if err := a.DeletePriorityForPost(post.Id); err != nil {
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to delete post priority for the post", mlog.Err(err), mlog.String("post_id", post.Id))
	}
	if err := a.Srv().Store().PostAcknowledgement().DeleteAllForPost(post.Id); err != nil {
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to delete post acknowledgements for the post", mlog.Err(err), mlog.String("post_id", post.Id))
	}
	if err := a.Srv().Store().Post().DeleteAllPostRemindersForPost(post.Id); err != nil {
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to delete post reminders for the post", mlog.Err(err), mlog.String("post_id", post.Id))
	}
	if err := a.Srv().Store().Post().PermanentDeleteAssociatedData([]string{post.Id}); err != nil {
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to permanently delete associated data for the post", mlog.Err(err), mlog.String("post_id", post.Id))
	}
	// Replace the post's content with scrub placeholders and persist the stub.
	scrubPost(post)
	_, err := a.Srv().Store().Post().Overwrite(rctx, post)
	if err != nil {
		// NOTE(review): an Overwrite failure is only logged, so a failed scrub
		// leaves the original content in the stub row — confirm this is intentional.
		rctx.Logger().Error("PermanentDeletePostDataRetainStub: Failed to scrub post content", mlog.Err(err), mlog.String("post_id", post.Id))
	}
	// If the post is not already deleted, delete it now.
	if post.DeleteAt == 0 {
		// DeletePost is called to take care of WebSocket events, cache invalidation, search index removal,
		// persistent notification removal and other cleanup tasks that need to happen on post deletion.
		_, appErr = a.DeletePost(rctx, post.Id, deleteByID)
		if appErr != nil {
			return appErr
		}
	}
	return nil
}
func (a *App) KeepFlaggedPost(rctx request.CTX, actionRequest *model.FlagContentActionRequest, reviewerId string, flaggedPost *model.Post) *model.AppError {
// for keeping a flagged flaggedPost we need to-
// 1. Undelete the flaggedPost if it was deleted, that's it
@ -808,11 +821,17 @@ func (a *App) KeepFlaggedPost(rctx request.CTX, actionRequest *model.FlagContent
}
func scrubPost(post *model.Post) {
post.Message = "*Content deleted as part of Content Flagging review process*"
if post.Type == model.PostTypeBurnOnRead {
post.Message = "*Content deleted as part of burning the post*"
} else {
post.Message = "*Content deleted as part of Content Flagging review process*"
}
post.MessageSource = post.Message
post.Hashtags = ""
post.Metadata = nil
post.FileIds = []string{}
post.UpdateAt = model.GetMillis()
post.SetProps(make(map[string]any))
}

View file

@ -1738,6 +1738,28 @@ func TestImportImportUser(t *testing.T) {
assert.True(t, teamMember.SchemeGuest)
assert.Equal(t, "", channelMember.ExplicitRoles)
})
t.Run("import guest user without any team or channel memberships", func(t *testing.T) {
username := model.NewUsername()
guestData := &imports.UserImportData{
Username: &username,
Email: model.NewPointer(model.NewId() + "@example.com"),
Roles: model.NewPointer("system_guest"),
}
appErr := th.App.importUser(th.Context, guestData, false)
require.Nil(t, appErr, "Failed to import guest user without memberships")
user, appErr := th.App.GetUserByUsername(*guestData.Username)
require.Nil(t, appErr, "Failed to get user from database.")
assert.True(t, user.IsGuest(), "User should be a guest")
assert.Equal(t, "system_guest", user.Roles)
teams, appErr := th.App.GetTeamsForUser(user.Id)
require.Nil(t, appErr)
assert.Empty(t, teams, "Guest user should have no team memberships")
})
}
func TestImportUserTeams(t *testing.T) {

View file

@ -267,8 +267,8 @@ func ValidateUserImportData(data *UserImportData) *model.AppError {
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.roles_invalid.error", nil, "", http.StatusBadRequest)
}
if !isValidGuestRoles(*data) {
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.guest_roles_conflict.error", nil, "", http.StatusBadRequest)
if err := validateGuestRoles(*data); err != nil {
return err
}
if data.NotifyProps != nil {
@ -758,49 +758,77 @@ func isValidEmailBatchingInterval(emailInterval string) bool {
emailInterval == model.PreferenceEmailIntervalHour
}
// isValidGuestRoles checks if the user has both guest roles in the same team or channel.
// at this point we assume that the user has a valid role scheme.
func isValidGuestRoles(data UserImportData) bool {
// validateGuestRoles checks if the user has guest roles consistently across system, team, and channel levels.
// At this point, we assume that the user has a valid role scheme.
func validateGuestRoles(data UserImportData) *model.AppError {
if data.Roles == nil {
return true
return nil
}
isSystemGuest := model.IsInRole(*data.Roles, model.SystemGuestRoleId)
var isTeamGuest, isChannelGuest bool
if data.Teams != nil {
// counters for guest roles for teams and channels
// we expect the total count of guest roles to be equal to the total count of teams and channels
var gtc, ctc int
for _, team := range *data.Teams {
if team.Roles != nil && model.IsInRole(*team.Roles, model.TeamGuestRoleId) {
gtc++
}
// If user has no teams, they can still be a system guest without issue
if data.Teams == nil || len(*data.Teams) == 0 {
return nil
}
var isTeamGuest, isChannelGuest bool
var hasChannels bool // hasChannels indicates if the user has any channels within their teams
var teamGuestCount, channelGuestCount int
var totalTeams, totalChannels int
totalTeams = len(*data.Teams)
for _, team := range *data.Teams {
if team.Roles != nil && model.IsInRole(*team.Roles, model.TeamGuestRoleId) {
teamGuestCount++
}
if len(model.SafeDereference(team.Channels)) > 0 {
hasChannels = true
totalChannels += len(*team.Channels)
if team.Channels == nil || len(*team.Channels) == 0 {
continue
}
for _, channel := range *team.Channels {
if channel.Roles != nil && model.IsInRole(*channel.Roles, model.ChannelGuestRoleId) {
ctc++
channelGuestCount++
}
}
if ctc == len(*team.Channels) {
isChannelGuest = true
}
}
if gtc == len(*data.Teams) {
isTeamGuest = true
}
}
// basically we want to be sure if the user either fully guest in all 3 places or not at all
// (a | b | c) & !(a & b & c) -> 3-way XOR?
if (isSystemGuest || isTeamGuest || isChannelGuest) && !(isSystemGuest && isTeamGuest && isChannelGuest) {
return false
// Set flags based on whether all available teams/channels have guest roles
if totalTeams > 0 && teamGuestCount == totalTeams {
isTeamGuest = true
}
return true
if hasChannels && channelGuestCount == totalChannels {
isChannelGuest = true
}
// If the user is a system guest, they must have consistent guest roles in any teams/channels they belong to
if isSystemGuest {
// If they have teams, they must be a team guest in all teams
if totalTeams > 0 && !isTeamGuest {
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.system_guest_missing_team_guest_roles.error", map[string]any{"TeamGuestCount": teamGuestCount, "TotalTeams": totalTeams}, "", http.StatusBadRequest)
}
// If they have channels, they must be a channel guest in all channels
if hasChannels && !isChannelGuest {
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.system_guest_missing_channel_guest_roles.error", map[string]any{"ChannelGuestCount": channelGuestCount, "TotalChannels": totalChannels}, "", http.StatusBadRequest)
}
return nil
}
// If not a system guest, ensure consistency in the other direction
// If they're a team or channel guest, they must be a system guest
if (isTeamGuest || isChannelGuest) && !isSystemGuest {
if isTeamGuest {
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.team_guest_missing_system_guest_role.error", nil, "", http.StatusBadRequest)
}
return model.NewAppError("BulkImport", "app.import.validate_user_import_data.channel_guest_missing_system_guest_role.error", nil, "", http.StatusBadRequest)
}
return nil
}
// ValidateAttachmentPathForImport joins 'path' to 'basePath' (defaulting to "." if empty) and ensures

View file

@ -1601,17 +1601,16 @@ func checkNoError(t *testing.T, err *model.AppError) {
require.Nil(t, err, "Unexpected Error: %v", err)
}
func TestIsValidGuestRoles(t *testing.T) {
func TestValidateGuestRoles(t *testing.T) {
testCases := []struct {
name string
input UserImportData
expected bool
name string
input UserImportData
expectError bool
}{
{
name: "Valid case: User is a guest in all places",
input: UserImportData{
Username: model.NewPointer("guest1"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
@ -1621,13 +1620,12 @@ func TestIsValidGuestRoles(t *testing.T) {
},
},
},
expected: true,
expectError: false,
},
{
name: "Invalid case: User is a guest in a team but not in another team",
input: UserImportData{
Username: model.NewPointer("mixeduser1"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
@ -1643,13 +1641,12 @@ func TestIsValidGuestRoles(t *testing.T) {
},
},
},
expected: false,
expectError: true,
},
{
name: "Invalid case: User is a guest in a team but not in another team and has no channel membership",
input: UserImportData{
Username: model.NewPointer("mixeduser2"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
@ -1663,21 +1660,27 @@ func TestIsValidGuestRoles(t *testing.T) {
},
},
},
expected: false,
expectError: true,
},
{
name: "Invalid case: User is system guest but not guest in team and channel",
name: "Valid case: User is system guest with no teams",
input: UserImportData{
Username: model.NewPointer("systemguestonly"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Roles: model.NewPointer(model.SystemGuestRoleId),
},
expected: false,
expectError: false,
},
{
name: "Valid case: User is system guest with empty teams array",
input: UserImportData{
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{},
},
expectError: false,
},
{
name: "Invalid case: User has mixed roles",
input: UserImportData{
Username: model.NewPointer("mixeduser3"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamUserRoleId),
@ -1687,20 +1690,42 @@ func TestIsValidGuestRoles(t *testing.T) {
},
},
},
expected: false,
expectError: true,
},
{
name: "Valid case: User does not have any role defined in any place",
name: "Valid case: User is system guest with team guest role but no channels",
input: UserImportData{
Username: model.NewPointer("noroleuser"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
Channels: &[]UserChannelImportData{},
},
},
},
expected: true,
expectError: false,
},
{
name: "Valid case: User is system guest with team guest role and nil channels",
input: UserImportData{
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
},
},
},
expectError: false,
},
{
name: "Valid case: User does not have any role defined in any place",
input: UserImportData{},
expectError: false,
},
{
name: "Valid case: User is not a guest in any place",
input: UserImportData{
Username: model.NewPointer("normaluser"),
Roles: model.NewPointer(model.SystemUserRoleId),
Roles: model.NewPointer(model.SystemUserRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamAdminRoleId),
@ -1710,81 +1735,18 @@ func TestIsValidGuestRoles(t *testing.T) {
},
},
},
expected: true,
},
{
name: "Valid case: User with team but nil channels array",
input: UserImportData{
Username: model.NewPointer("nilchannelsuser"),
Roles: model.NewPointer(model.SystemUserRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamUserRoleId),
Channels: nil,
},
},
},
expected: true,
},
{
name: "Invalid case: User is guest in channels but not in system or team",
input: UserImportData{
Username: model.NewPointer("testuser3"),
Roles: model.NewPointer(model.SystemUserRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamUserRoleId),
Channels: &[]UserChannelImportData{
{Roles: model.NewPointer(model.ChannelGuestRoleId)},
},
},
},
},
expected: false,
},
{
name: "Invalid case: User is system guest and team guest but has no channels",
input: UserImportData{
Username: model.NewPointer("testuser4"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
Channels: &[]UserChannelImportData{},
},
},
},
expected: false,
},
{
name: "Valid case: User is guest in all places with multiple teams and channels",
input: UserImportData{
Username: model.NewPointer("testuser5"),
Roles: model.NewPointer(model.SystemGuestRoleId),
Teams: &[]UserTeamImportData{
{
Roles: model.NewPointer(model.TeamGuestRoleId),
Channels: &[]UserChannelImportData{
{Roles: model.NewPointer(model.ChannelGuestRoleId)},
{Roles: model.NewPointer(model.ChannelGuestRoleId)},
},
},
{
Roles: model.NewPointer(model.TeamGuestRoleId),
Channels: &[]UserChannelImportData{
{Roles: model.NewPointer(model.ChannelGuestRoleId)},
},
},
},
},
expected: true,
expectError: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result := isValidGuestRoles(tc.input)
assert.Equal(t, tc.expected, result, tc.name)
err := validateGuestRoles(tc.input)
if tc.expectError {
assert.NotNil(t, err, tc.name)
} else {
assert.Nil(t, err, tc.name)
}
})
}
}

View file

@ -107,6 +107,16 @@ func (ps *PlatformService) GetLogsSkipSend(rctx request.CTX, page, perPage int,
if *ps.Config().LogSettings.EnableFile {
ps.Log().Flush()
logFile := config.GetLogFileLocation(*ps.Config().LogSettings.FileLocation)
// Validate the file path to prevent arbitrary file reads
if err := ps.validateLogFilePath(logFile); err != nil {
rctx.Logger().Error("Blocked attempt to read log file outside allowed root",
mlog.String("path", logFile),
mlog.String("config_section", "LogSettings.FileLocation"),
mlog.Err(err))
return nil, model.NewAppError("getLogs", "api.admin.file_read_error", nil, "", http.StatusForbidden).Wrap(err)
}
file, err := os.Open(logFile)
if err != nil {
return nil, model.NewAppError("getLogs", "api.admin.file_read_error", nil, "", http.StatusInternalServerError).Wrap(err)
@ -218,7 +228,7 @@ func (ps *PlatformService) GetLogFile(rctx request.CTX) (*model.FileData, error)
// validateLogFilePath validates that a log file path is within the logging root directory.
// This prevents arbitrary file read/write vulnerabilities in logging configuration.
// The logging root is determined by MM_LOG_PATH environment variable or the default logs directory.
// Currently used to validate paths when reading logs via GetAdvancedLogs.
// Used to validate paths when reading logs via GetLogsSkipSend, GetLogFile, and GetAdvancedLogs.
// In future versions, this will also be used to validate paths when saving logging config.
func (ps *PlatformService) validateLogFilePath(filePath string) error {
// Get the logging root path (from env var or default logs directory)

View file

@ -102,6 +102,54 @@ func TestGetMattermostLog(t *testing.T) {
})
}
// TestGetLogsSkipSendPathValidation exercises the log-file path validation in
// GetLogsSkipSend: a FileLocation outside the MM_LOG_PATH root must be refused.
func TestGetLogsSkipSendPathValidation(t *testing.T) {
	mainHelper.Parallel(t)
	th := Setup(t)
	t.Run("path validation prevents reading files outside log directory", func(t *testing.T) {
		// Allowed log root; MM_LOG_PATH will point here.
		allowedRoot, mkErr := os.MkdirTemp("", "logs")
		require.NoError(t, mkErr)
		t.Cleanup(func() {
			// Disable file logging and flush before removing the directory so
			// no writer still holds the log file open.
			th.Service.UpdateConfig(func(cfg *model.Config) {
				*cfg.LogSettings.EnableFile = false
			})
			th.Service.Logger().Flush()
			require.NoError(t, os.RemoveAll(allowedRoot))
		})

		// Restrict log file access to the allowed root.
		t.Setenv("MM_LOG_PATH", allowedRoot)

		// A second directory that lies outside the allowed root.
		forbiddenRoot, mkErr2 := os.MkdirTemp("", "outside")
		require.NoError(t, mkErr2)
		t.Cleanup(func() {
			require.NoError(t, os.RemoveAll(forbiddenRoot))
		})

		// Plant a log file outside the root that must never be served.
		forbiddenLog := config.GetLogFileLocation(forbiddenRoot)
		require.NoError(t, os.WriteFile(forbiddenLog, []byte("secret data\n"), 0644))

		// Point FileLocation at the forbidden directory.
		th.Service.UpdateConfig(func(cfg *model.Config) {
			*cfg.LogSettings.EnableFile = true
			*cfg.LogSettings.FileLocation = forbiddenRoot
		})

		// The read must be rejected by path validation.
		lines, appErr := th.Service.GetLogsSkipSend(th.Context, 0, 10, &model.LogFilter{})
		assert.Nil(t, lines)
		require.NotNil(t, appErr)
		assert.Equal(t, "api.admin.file_read_error", appErr.Id)
	})
}
func TestGetAdvancedLogs(t *testing.T) {
mainHelper.Parallel(t)

View file

@ -257,7 +257,10 @@ func New(sc ServiceConfig, options ...Option) (*PlatformService, error) {
// Timer layer
// |
// Cache layer
ps.sqlStore, err = sqlstore.New(ps.Config().SqlSettings, ps.Log(), ps.metricsIFace, ps.storeOptions...)
opts := append(ps.storeOptions, sqlstore.WithFeatureFlags(func() *model.FeatureFlags {
return ps.Config().FeatureFlags
}))
ps.sqlStore, err = sqlstore.New(ps.Config().SqlSettings, ps.Log(), ps.metricsIFace, opts...)
if err != nil {
return nil, err
}

View file

@ -165,6 +165,12 @@ func (a *App) CreatePost(rctx request.CTX, post *model.Post, channel *model.Chan
return nil, false, model.NewAppError("CreatePost", "app.post.create_post.shared_dm_or_gm.app_error", nil, "", http.StatusBadRequest)
}
// Validate burn-on-read restrictions (self-DMs, DMs with bots)
err = PostBurnOnReadCheckWithApp("App.CreatePost", a, rctx, post.UserId, post.ChannelId, post.Type, channel)
if err != nil {
return nil, false, err
}
foundPost, err := a.deduplicateCreatePost(rctx, post)
if err != nil {
return nil, false, err
@ -423,12 +429,13 @@ func (a *App) CreatePost(rctx request.CTX, post *model.Post, channel *model.Chan
_, translateErr := a.AutoTranslation().Translate(rctx.Context(), model.TranslationObjectTypePost, rpost.Id, rpost.ChannelId, rpost.UserId, rpost)
if translateErr != nil {
var notAvailErr *model.ErrAutoTranslationNotAvailable
if errors.As(translateErr, &notAvailErr) {
switch {
case errors.As(translateErr, &notAvailErr):
// Feature not available - log at debug level and continue
rctx.Logger().Debug("Auto-translation feature not available", mlog.String("post_id", rpost.Id), mlog.Err(translateErr))
} else if translateErr.Id == "ent.autotranslation.no_translatable_content" {
case translateErr.Id == "ent.autotranslation.no_translatable_content":
// No translatable content (only URLs/mentions) - this is expected, don't log
} else {
default:
// Unexpected error - log at warn level but don't fail post creation
rctx.Logger().Warn("Failed to translate post", mlog.String("post_id", rpost.Id), mlog.Err(translateErr))
}
@ -915,12 +922,13 @@ func (a *App) UpdatePost(rctx request.CTX, receivedUpdatedPost *model.Post, upda
_, translateErr := a.AutoTranslation().Translate(rctx.Context(), model.TranslationObjectTypePost, rpost.Id, rpost.ChannelId, rpost.UserId, rpost)
if translateErr != nil {
var notAvailErr *model.ErrAutoTranslationNotAvailable
if errors.As(translateErr, &notAvailErr) {
switch {
case errors.As(translateErr, &notAvailErr):
// Feature not available - log at debug level and continue
rctx.Logger().Debug("Auto-translation feature not available for edited post", mlog.String("post_id", rpost.Id), mlog.Err(translateErr))
} else if translateErr.Id == "ent.autotranslation.no_translatable_content" {
case translateErr.Id == "ent.autotranslation.no_translatable_content":
// No translatable content (only URLs/mentions) - this is expected, don't log
} else {
default:
// Unexpected error - log at warn level but don't fail post update
rctx.Logger().Warn("Failed to translate edited post", mlog.String("post_id", rpost.Id), mlog.Err(translateErr))
}
@ -1226,6 +1234,8 @@ func (a *App) GetPostsSince(rctx request.CTX, options model.GetPostsSinceOptions
return nil, model.NewAppError("GetPostsSince", "app.post.get_posts_since.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
}
a.supplementWithTranslationUpdatedPosts(rctx, postList, options.ChannelId, options.Time, options.CollapsedThreads)
if appErr := a.filterInaccessiblePosts(postList, filterPostOptions{assumeSortedCreatedAt: true}); appErr != nil {
return nil, appErr
}
@ -1241,6 +1251,73 @@ func (a *App) GetPostsSince(rctx request.CTX, options model.GetPostsSinceOptions
return postList, nil
}
// supplementWithTranslationUpdatedPosts finds posts in the channel whose
// translations were updated after `since` and merges them into the post list's
// Posts map (never into Order), so the client receives refreshed translations
// without the chronological ordering of the list changing. All failures are
// logged and swallowed: translation supplementing is strictly best-effort.
func (a *App) supplementWithTranslationUpdatedPosts(rctx request.CTX, postList *model.PostList, channelID string, since int64, collapsedThreads bool) {
	autoTranslation := a.AutoTranslation()
	if autoTranslation == nil || !autoTranslation.IsFeatureAvailable() {
		return
	}

	userID := rctx.Session().UserId
	userLang, appErr := autoTranslation.GetUserLanguage(userID, channelID)
	if appErr != nil {
		rctx.Logger().Debug("Failed to get user language for translation-since supplement", mlog.String("channel_id", channelID), mlog.Err(appErr))
		return
	}
	// No target language configured means nothing to supplement.
	if userLang == "" {
		return
	}

	updatedTranslations, err := a.Srv().Store().AutoTranslation().GetTranslationsSinceForChannel(channelID, userLang, since)
	if err != nil {
		rctx.Logger().Warn("Failed to get translations since for channel", mlog.String("channel_id", channelID), mlog.Err(err))
		return
	}

	// Collect only the post IDs the list does not already contain.
	absentIDs := make([]string, 0, len(updatedTranslations))
	for postID := range updatedTranslations {
		if _, present := postList.Posts[postID]; !present {
			absentIDs = append(absentIDs, postID)
		}
	}
	if len(absentIDs) == 0 {
		return
	}

	posts, err := a.Srv().Store().Post().GetPostsByIds(absentIDs)
	if err != nil {
		rctx.Logger().Warn("Failed to fetch posts for translation-since supplement", mlog.Err(err))
		return
	}

	for _, post := range posts {
		// Skip deleted posts, and thread replies when threads are collapsed.
		if post.DeleteAt != 0 || (collapsedThreads && post.RootId != "") {
			continue
		}
		translation, found := updatedTranslations[post.Id]
		if !found {
			continue
		}
		if post.Metadata == nil {
			post.Metadata = &model.PostMetadata{}
		}
		if post.Metadata.Translations == nil {
			post.Metadata.Translations = make(map[string]*model.PostTranslation)
		}
		post.Metadata.Translations[translation.Lang] = translation.ToPostTranslation()
		// Add to Posts map only — not to Order — so the client gets the updated
		// translation without changing the chronological post list.
		postList.Posts[post.Id] = post
	}
}
func (a *App) GetSinglePost(rctx request.CTX, postID string, includeDeleted bool) (*model.Post, *model.AppError) {
post, err := a.Srv().Store().Post().GetSingle(rctx, postID, includeDeleted)
if err != nil {
@ -1694,10 +1771,6 @@ func (a *App) DeletePost(rctx request.CTX, postID, deleteByID string) (*model.Po
return nil, model.NewAppError("DeletePost", "app.post.get.app_error", nil, "", http.StatusBadRequest).Wrap(err)
}
if post.Type == model.PostTypeBurnOnRead {
return nil, a.PermanentDeletePost(rctx, postID, deleteByID)
}
channel, appErr := a.GetChannel(rctx, post.ChannelId)
if appErr != nil {
return nil, appErr
@ -3703,7 +3776,7 @@ func (a *App) BurnPost(rctx request.CTX, post *model.Post, userID string, connec
// If user is the author, permanently delete the post
if post.UserId == userID {
return a.PermanentDeletePost(rctx, post.Id, userID)
return a.PermanentDeletePostDataRetainStub(rctx, post, userID)
}
// If not the author, check read receipt

View file

@ -372,6 +372,10 @@ func (a *App) revealBurnOnReadPostsForUser(rctx request.CTX, postList *model.Pos
}
for _, post := range postList.BurnOnReadPosts {
if post.DeleteAt > 0 {
continue
}
// If user is the author, reveal the post with recipients
if post.UserId == userID {
if err := a.revealPostForAuthor(rctx, postList, post); err != nil {

View file

@ -425,3 +425,39 @@ func Test_getInaccessibleRange(t *testing.T) {
})
}
}
// TestRevealBurnOnReadPostsForUser verifies that revealBurnOnReadPostsForUser
// leaves already-deleted burn-on-read posts untouched instead of revealing them.
func TestRevealBurnOnReadPostsForUser(t *testing.T) {
	th := Setup(t).InitBasic(t)

	// Enable the BurnOnRead feature via license, flag, and config.
	th.App.Srv().SetLicense(model.NewTestLicenseSKU(model.LicenseShortSkuEnterpriseAdvanced))
	th.App.UpdateConfig(func(cfg *model.Config) {
		cfg.FeatureFlags.BurnOnRead = true
		cfg.ServiceSettings.EnableBurnOnRead = model.NewPointer(true)
	})

	t.Run("skips deleted burn-on-read post", func(t *testing.T) {
		now := model.GetMillis()
		burnedPost := &model.Post{
			Id:        model.NewId(),
			UserId:    th.BasicUser.Id,
			ChannelId: th.BasicChannel.Id,
			Message:   "deleted burn on read message",
			Type:      model.PostTypeBurnOnRead,
			DeleteAt:  now,
			CreateAt:  now,
		}

		postList := model.NewPostList()
		postList.AddPost(burnedPost)

		resultList, appErr := th.App.revealBurnOnReadPostsForUser(th.Context, postList, th.BasicUser2.Id)
		require.Nil(t, appErr)
		require.NotNil(t, resultList)

		// The deleted post stays in BurnOnReadPosts but must not be processed.
		assert.Contains(t, resultList.BurnOnReadPosts, burnedPost.Id)
		// It must come back unmodified: DeleteAt still set, message untouched.
		assert.Equal(t, burnedPost.DeleteAt, resultList.BurnOnReadPosts[burnedPost.Id].DeleteAt)
		assert.Equal(t, burnedPost.Message, resultList.BurnOnReadPosts[burnedPost.Id].Message)
	})
}

View file

@ -153,6 +153,9 @@ func (a *App) populatePostListTranslations(rctx request.CTX, list *model.PostLis
}
post.Metadata.Translations[t.Lang] = t.ToPostTranslation()
if t.UpdateAt > post.UpdateAt {
post.UpdateAt = t.UpdateAt
}
}
}
}
@ -842,6 +845,8 @@ func (a *App) getLinkMetadataForPermalink(rctx request.CTX, requestURL string) (
permalink = &model.Permalink{PreviewPost: model.NewPreviewPost(referencedPostWithMetadata, referencedTeam, referencedChannel)}
}
a.populatePostListTranslations(rctx, &model.PostList{Posts: map[string]*model.Post{permalink.PreviewPost.Post.Id: permalink.PreviewPost.Post}})
return permalink, nil
}

View file

@ -114,3 +114,47 @@ func userCreatePostPermissionCheckWithApp(rctx request.CTX, a *App, userId, chan
return nil
}
// PostBurnOnReadCheckWithApp validates whether a burn-on-read post can be
// created based on channel type and participants. It is called from the API
// layer before post creation to enforce burn-on-read restrictions: such posts
// are not allowed in self-DMs or in DMs with bots (AI agents, plugins, etc.).
// Non-burn-on-read posts and non-DM channels always pass. `where` is used as
// the AppError location; `channel` may be nil, in which case it is fetched.
func PostBurnOnReadCheckWithApp(where string, a *App, rctx request.CTX, userId, channelId, postType string, channel *model.Channel) *model.AppError {
	// Nothing to validate unless this is a burn-on-read post.
	if postType != model.PostTypeBurnOnRead {
		return nil
	}

	// Fetch the channel when the caller did not supply one.
	if channel == nil {
		fetched, err := a.GetChannel(rctx, channelId)
		if err != nil {
			return model.NewAppError(where, "api.post.fill_in_post_props.burn_on_read.channel.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
		}
		channel = fetched
	}

	// Restrictions only apply to direct-message channels.
	if channel.Type != model.ChannelTypeDirect {
		return nil
	}

	// Reject self-DMs: the channel name for a self-DM is derived from the
	// user's own ID on both sides.
	if channel.Name == model.GetDMNameFromIds(userId, userId) {
		return model.NewAppError(where, "api.post.fill_in_post_props.burn_on_read.self_dm.app_error", nil, "", http.StatusBadRequest)
	}

	// Identify the DM counterpart; nothing more to check if there isn't one.
	otherUserId := channel.GetOtherUserIdForDM(userId)
	if otherUserId == "" || otherUserId == userId {
		return nil
	}

	otherUser, err := a.GetUser(otherUserId)
	if err != nil {
		// Failed to retrieve the other user (user not found, DB error, etc.)
		// Block burn-on-read post as we cannot validate the recipient
		return model.NewAppError(where, "api.post.fill_in_post_props.burn_on_read.user.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
	}

	// Reject DMs with bots (AI agents, plugins, etc.).
	if otherUser.IsBot {
		return model.NewAppError(where, "api.post.fill_in_post_props.burn_on_read.bot_dm.app_error", nil, "", http.StatusBadRequest)
	}

	return nil
}

View file

@ -5367,3 +5367,262 @@ func TestGetFlaggedPostsWithExpiredBurnOnRead(t *testing.T) {
require.Greater(t, post.Metadata.ExpireAt, model.GetMillis())
})
}
func TestBurnOnReadRestrictionsForDMsAndBots(t *testing.T) {
os.Setenv("MM_FEATUREFLAGS_BURNONREAD", "true")
defer func() {
os.Unsetenv("MM_FEATUREFLAGS_BURNONREAD")
}()
th := Setup(t).InitBasic(t)
th.App.Srv().SetLicense(model.NewTestLicenseSKU(model.LicenseShortSkuEnterpriseAdvanced))
th.App.UpdateConfig(func(cfg *model.Config) {
cfg.ServiceSettings.EnableBurnOnRead = model.NewPointer(true)
cfg.ServiceSettings.BurnOnReadMaximumTimeToLiveSeconds = model.NewPointer(600)
cfg.ServiceSettings.BurnOnReadDurationSeconds = model.NewPointer(600)
})
t.Run("should allow burn-on-read posts in direct messages with another user", func(t *testing.T) {
// Create a direct message channel between two different users
dmChannel, appErr := th.App.GetOrCreateDirectChannel(th.Context, th.BasicUser.Id, th.BasicUser2.Id)
require.Nil(t, appErr)
require.Equal(t, model.ChannelTypeDirect, dmChannel.Type)
post := &model.Post{
ChannelId: dmChannel.Id,
Message: "This is a burn-on-read message in DM",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
createdPost, _, err := th.App.CreatePost(th.Context, post, dmChannel, model.CreatePostFlags{SetOnline: true})
require.Nil(t, err)
require.NotNil(t, createdPost)
require.Equal(t, model.PostTypeBurnOnRead, createdPost.Type)
})
t.Run("should allow burn-on-read posts in group messages", func(t *testing.T) {
// Create a group message channel with at least 3 users
user3 := th.CreateUser(t)
th.LinkUserToTeam(t, user3, th.BasicTeam)
gmChannel := th.CreateGroupChannel(t, th.BasicUser2, user3)
require.Equal(t, model.ChannelTypeGroup, gmChannel.Type)
// This should succeed - group messages allow BoR
post := &model.Post{
ChannelId: gmChannel.Id,
Message: "This is a burn-on-read message in GM",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
createdPost, _, err := th.App.CreatePost(th.Context, post, gmChannel, model.CreatePostFlags{SetOnline: true})
require.Nil(t, err)
require.NotNil(t, createdPost)
require.Equal(t, model.PostTypeBurnOnRead, createdPost.Type)
})
t.Run("should allow burn-on-read posts from bot users", func(t *testing.T) {
// Create a bot user
bot := &model.Bot{
Username: "testbot",
DisplayName: "Test Bot",
Description: "Test Bot for burn-on-read (bots can send BoR for OTP, integrations, etc.)",
OwnerId: th.BasicUser.Id,
}
createdBot, appErr := th.App.CreateBot(th.Context, bot)
require.Nil(t, appErr)
// Get the bot user
botUser, appErr := th.App.GetUser(createdBot.UserId)
require.Nil(t, appErr)
require.True(t, botUser.IsBot)
// Create a burn-on-read post as bot (should succeed - bots can send BoR)
post := &model.Post{
ChannelId: th.BasicChannel.Id,
Message: "This is a burn-on-read message from bot",
UserId: botUser.Id,
Type: model.PostTypeBurnOnRead,
}
createdPost, _, err := th.App.CreatePost(th.Context, post, th.BasicChannel, model.CreatePostFlags{SetOnline: true})
require.Nil(t, err)
require.NotNil(t, createdPost)
require.Equal(t, model.PostTypeBurnOnRead, createdPost.Type)
})
t.Run("should reject burn-on-read posts in self DMs", func(t *testing.T) {
// Create a self DM channel (user messaging themselves)
selfDMChannel, appErr := th.App.GetOrCreateDirectChannel(th.Context, th.BasicUser.Id, th.BasicUser.Id)
require.Nil(t, appErr)
require.Equal(t, model.ChannelTypeDirect, selfDMChannel.Type)
// Try to create a burn-on-read post in self DM
post := &model.Post{
ChannelId: selfDMChannel.Id,
Message: "This is a burn-on-read message to myself",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
_, _, err := th.App.CreatePost(th.Context, post, selfDMChannel, model.CreatePostFlags{SetOnline: true})
require.NotNil(t, err)
require.Equal(t, "api.post.fill_in_post_props.burn_on_read.self_dm.app_error", err.Id)
})
t.Run("should reject burn-on-read posts in DMs with bots/AI agents", func(t *testing.T) {
// Create a bot user
bot := &model.Bot{
Username: "aiagent",
DisplayName: "AI Agent",
Description: "Test AI Agent for burn-on-read restrictions",
OwnerId: th.BasicUser.Id,
}
createdBot, appErr := th.App.CreateBot(th.Context, bot)
require.Nil(t, appErr)
// Get the bot user
botUser, appErr := th.App.GetUser(createdBot.UserId)
require.Nil(t, appErr)
require.True(t, botUser.IsBot)
// Create a DM channel between the regular user and the bot
dmWithBotChannel, appErr := th.App.GetOrCreateDirectChannel(th.Context, th.BasicUser.Id, botUser.Id)
require.Nil(t, appErr)
require.Equal(t, model.ChannelTypeDirect, dmWithBotChannel.Type)
// Try to create a burn-on-read post in DM with bot (regular user sending)
post := &model.Post{
ChannelId: dmWithBotChannel.Id,
Message: "This is a burn-on-read message to AI agent",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
_, _, err := th.App.CreatePost(th.Context, post, dmWithBotChannel, model.CreatePostFlags{SetOnline: true})
require.NotNil(t, err)
require.Equal(t, "api.post.fill_in_post_props.burn_on_read.bot_dm.app_error", err.Id)
})
t.Run("should reject burn-on-read posts in DMs with deleted users", func(t *testing.T) {
// Create a user that we'll delete
userToDelete := th.CreateUser(t)
th.LinkUserToTeam(t, userToDelete, th.BasicTeam)
// Create a DM channel between the regular user and the user we'll delete
dmChannel, appErr := th.App.GetOrCreateDirectChannel(th.Context, th.BasicUser.Id, userToDelete.Id)
require.Nil(t, appErr)
require.Equal(t, model.ChannelTypeDirect, dmChannel.Type)
// Delete the user
appErr = th.App.PermanentDeleteUser(th.Context, userToDelete)
require.Nil(t, appErr)
// Try to create a burn-on-read post in DM with deleted user
post := &model.Post{
ChannelId: dmChannel.Id,
Message: "This is a burn-on-read message to deleted user",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
// This should fail because we can't validate the other user (deleted)
_, _, err := th.App.CreatePost(th.Context, post, dmChannel, model.CreatePostFlags{SetOnline: true})
require.NotNil(t, err)
require.Equal(t, "api.post.fill_in_post_props.burn_on_read.user.app_error", err.Id)
})
t.Run("should allow burn-on-read posts in public channels", func(t *testing.T) {
// This should succeed - public channel, regular user
require.Equal(t, model.ChannelTypeOpen, th.BasicChannel.Type)
post := &model.Post{
ChannelId: th.BasicChannel.Id,
Message: "This is a burn-on-read message in public channel",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
createdPost, _, err := th.App.CreatePost(th.Context, post, th.BasicChannel, model.CreatePostFlags{SetOnline: true})
require.Nil(t, err)
require.NotNil(t, createdPost)
require.Equal(t, model.PostTypeBurnOnRead, createdPost.Type)
})
t.Run("should allow burn-on-read posts in private channels", func(t *testing.T) {
// Create a private channel using helper
createdPrivateChannel := th.CreatePrivateChannel(t, th.BasicTeam)
require.Equal(t, model.ChannelTypePrivate, createdPrivateChannel.Type)
// This should succeed - private channel, regular user
post := &model.Post{
ChannelId: createdPrivateChannel.Id,
Message: "This is a burn-on-read message in private channel",
UserId: th.BasicUser.Id,
Type: model.PostTypeBurnOnRead,
}
createdPost, _, err := th.App.CreatePost(th.Context, post, createdPrivateChannel, model.CreatePostFlags{SetOnline: true})
require.Nil(t, err)
require.NotNil(t, createdPost)
require.Equal(t, model.PostTypeBurnOnRead, createdPost.Type)
})
}
// TestGetBurnOnReadPost verifies that getBurnOnReadPost replaces the
// placeholder content of a burn-on-read post with the real content held in
// the TemporaryPost store, without mutating the post passed in, and that it
// surfaces an app error when no temporary post exists for the ID.
func TestGetBurnOnReadPost(t *testing.T) {
	t.Run("success - temporary post found", func(t *testing.T) {
		th := Setup(t).InitBasic(t)

		// Placeholder post, as it would appear in the regular posts table.
		placeholder := &model.Post{
			Id:        model.NewId(),
			ChannelId: th.BasicChannel.Id,
			UserId:    th.BasicUser.Id,
			Message:   "placeholder message",
			FileIds:   model.StringArray{"file1"},
			Type:      model.PostTypeBurnOnRead,
		}

		// The real content lives in the TemporaryPost store until expiry.
		tempPost := &model.TemporaryPost{
			ID:       placeholder.Id,
			Type:     model.PostTypeBurnOnRead,
			ExpireAt: model.GetMillis() + 3600000,
			Message:  "actual secret message",
			FileIDs:  model.StringArray{"file2", "file3"},
		}
		_, err := th.App.Srv().Store().TemporaryPost().Save(th.Context, tempPost)
		require.NoError(t, err)

		revealed, appErr := th.App.getBurnOnReadPost(th.Context, placeholder)
		require.Nil(t, appErr)
		require.NotNil(t, revealed)
		assert.Equal(t, tempPost.Message, revealed.Message)
		assert.Equal(t, tempPost.FileIDs, revealed.FileIds)

		// The input post must be left untouched by the reveal.
		assert.Equal(t, "placeholder message", placeholder.Message)
		assert.Equal(t, model.StringArray{"file1"}, placeholder.FileIds)
	})

	t.Run("temporary post not found - returns app error", func(t *testing.T) {
		th := Setup(t).InitBasic(t)

		// No TemporaryPost row was saved for this ID, so the reveal must fail.
		orphan := &model.Post{
			Id:        model.NewId(),
			ChannelId: th.BasicChannel.Id,
			UserId:    th.BasicUser.Id,
			Message:   "placeholder message",
			Type:      model.PostTypeBurnOnRead,
		}

		revealed, appErr := th.App.getBurnOnReadPost(th.Context, orphan)
		require.NotNil(t, appErr)
		require.Nil(t, revealed)
		assert.Equal(t, "app.post.get_post.app_error", appErr.Id)
		assert.Equal(t, http.StatusInternalServerError, appErr.StatusCode)
	})
}

View file

@ -255,6 +255,11 @@ func (pas *PropertyAccessService) UpdatePropertyField(callerID string, groupID s
return nil, fmt.Errorf("UpdatePropertyField: %w", err)
}
// Validate protected field update
if err := pas.validateProtectedFieldUpdate(field, callerID); err != nil {
return nil, fmt.Errorf("UpdatePropertyField: %w", err)
}
// Validate access mode
if err := model.ValidatePropertyFieldAccessMode(field); err != nil {
return nil, fmt.Errorf("UpdatePropertyField: %w", err)
@ -315,6 +320,11 @@ func (pas *PropertyAccessService) UpdatePropertyFields(callerID string, groupID
return nil, fmt.Errorf("UpdatePropertyFields: field %s: %w", field.ID, err)
}
// Validate protected field update
if err := pas.validateProtectedFieldUpdate(field, callerID); err != nil {
return nil, fmt.Errorf("UpdatePropertyFields: field %s: %w", field.ID, err)
}
// Validate access mode
if err := model.ValidatePropertyFieldAccessMode(field); err != nil {
return nil, fmt.Errorf("UpdatePropertyFields: field %s: %w", field.ID, err)
@ -845,6 +855,27 @@ func (pas *PropertyAccessService) ensureSourcePluginIDUnchanged(existingField, u
return nil
}
// validateProtectedFieldUpdate checks that an update setting protected=true on
// a property field is legitimate. Two invalid cases are rejected: a protected
// field with no source_plugin_id (which would be orphaned — nobody could ever
// modify it again), and a caller other than the owning source plugin trying to
// flip the flag. Unprotected updates always pass. Returns nil when valid.
func (pas *PropertyAccessService) validateProtectedFieldUpdate(updatedField *model.PropertyField, callerID string) error {
	// Only updates that end up protected need scrutiny.
	if !model.IsPropertyFieldProtected(updatedField) {
		return nil
	}

	switch sourcePluginID := pas.getSourcePluginID(updatedField); {
	case sourcePluginID == "":
		return fmt.Errorf("cannot set protected=true on a field without a source_plugin_id")
	case sourcePluginID != callerID:
		return fmt.Errorf("cannot set protected=true: only source plugin '%s' can modify this field", sourcePluginID)
	default:
		return nil
	}
}
// checkFieldWriteAccess checks if the given caller can modify a PropertyField.
// IMPORTANT: Always pass the existing field fetched from the database, not a field provided by the caller.
// Returns nil if modification is allowed, or an error if denied.

View file

@ -959,6 +959,53 @@ func TestUpdatePropertyField_WriteAccessControl(t *testing.T) {
assert.Contains(t, err.Error(), "immutable")
})
t.Run("prevents setting protected=true without source_plugin_id", func(t *testing.T) {
field := &model.PropertyField{
GroupID: groupID,
Name: "Field Without Source Plugin",
Type: model.PropertyFieldTypeText,
Attrs: model.StringInterface{},
}
created, err := th.App.PropertyAccessService().CreatePropertyField("", field)
require.NoError(t, err)
// Try to set protected=true without having a source_plugin_id
created.Attrs[model.PropertyAttrsProtected] = true
updated, err := th.App.PropertyAccessService().UpdatePropertyField("plugin1", groupID, created)
require.Error(t, err)
assert.Nil(t, updated)
assert.Contains(t, err.Error(), "cannot set protected=true")
assert.Contains(t, err.Error(), "source_plugin_id")
})
t.Run("prevents non-source plugin from setting protected=true", func(t *testing.T) {
field := &model.PropertyField{
GroupID: groupID,
Name: "Field With Source Plugin",
Type: model.PropertyFieldTypeText,
Attrs: model.StringInterface{},
}
// Create field via plugin1 (sets source_plugin_id automatically)
created, err := th.App.PropertyAccessService().CreatePropertyFieldForPlugin("plugin1", field)
require.NoError(t, err)
assert.False(t, model.IsPropertyFieldProtected(created))
// Try to set protected=true by a different plugin (plugin2)
created.Attrs[model.PropertyAttrsProtected] = true
updated, err := th.App.PropertyAccessService().UpdatePropertyField("plugin2", groupID, created)
require.Error(t, err)
assert.Nil(t, updated)
assert.Contains(t, err.Error(), "cannot set protected=true")
assert.Contains(t, err.Error(), "plugin1")
// Verify the source plugin (plugin1) CAN set protected=true
updated, err = th.App.PropertyAccessService().UpdatePropertyField("plugin1", groupID, created)
require.NoError(t, err)
assert.True(t, model.IsPropertyFieldProtected(updated))
})
t.Run("non-CPA group allows anyone to update protected field", func(t *testing.T) {
// Register a non-CPA group
nonCpaGroup, err := pas.RegisterPropertyGroup("other-group-update")
@ -1051,6 +1098,36 @@ func TestUpdatePropertyFields_BulkWriteAccessControl(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, "Protected", check2.Name)
})
t.Run("fails atomically when trying to set protected=true without source_plugin_id in batch", func(t *testing.T) {
// Create two unprotected fields without source_plugin_id
field1 := &model.PropertyField{GroupID: groupID, Name: "Field1", Type: model.PropertyFieldTypeText, Attrs: model.StringInterface{}}
field2 := &model.PropertyField{GroupID: groupID, Name: "Field2", Type: model.PropertyFieldTypeText, Attrs: model.StringInterface{}}
created1, err := th.App.PropertyAccessService().CreatePropertyField("", field1)
require.NoError(t, err)
created2, err := th.App.PropertyAccessService().CreatePropertyField("", field2)
require.NoError(t, err)
// Try to set protected=true on field2 without source_plugin_id
created1.Name = "Updated Field1"
created2.Attrs[model.PropertyAttrsProtected] = true
updated, err := th.App.PropertyAccessService().UpdatePropertyFields("plugin1", groupID, []*model.PropertyField{created1, created2})
require.Error(t, err)
assert.Nil(t, updated)
assert.Contains(t, err.Error(), "cannot set protected=true")
assert.Contains(t, err.Error(), "source_plugin_id")
// Verify neither was updated (atomic failure)
check1, err := th.App.PropertyAccessService().GetPropertyField("", groupID, created1.ID)
require.NoError(t, err)
assert.Equal(t, "Field1", check1.Name)
check2, err := th.App.PropertyAccessService().GetPropertyField("", groupID, created2.ID)
require.NoError(t, err)
assert.False(t, model.IsPropertyFieldProtected(check2))
})
}
// TestDeletePropertyField_WriteAccessControl tests write access control for field deletion

View file

@ -19,7 +19,7 @@ import (
const (
getPendingScheduledPostsPageSize = 100
scheduledPostBatchWaitTime = 1 * time.Second
scheduledPostBatchWaitTime = 100 * time.Millisecond
)
func (a *App) ProcessScheduledPosts(rctx request.CTX) {
@ -314,6 +314,19 @@ func (a *App) canPostScheduledPost(rctx request.CTX, scheduledPost *model.Schedu
return model.ScheduledPostErrorInvalidPost, nil
}
// Validate burn-on-read restrictions for scheduled post
if appErr := PostBurnOnReadCheckWithApp("ScheduledPostJob.postChecks", a, rctx, scheduledPost.UserId, scheduledPost.ChannelId, scheduledPost.Type, channel); appErr != nil {
rctx.Logger().Debug(
"canPostScheduledPost burn-on-read check failed",
mlog.String("scheduled_post_id", scheduledPost.Id),
mlog.String("user_id", scheduledPost.UserId),
mlog.String("channel_id", scheduledPost.ChannelId),
mlog.String("error_code", model.ScheduledPostErrorInvalidPost),
mlog.Err(appErr),
)
return model.ScheduledPostErrorInvalidPost, nil
}
return "", nil
}

View file

@ -996,8 +996,11 @@ func TestSharedChannelMembershipSyncSelfReferential(t *testing.T) {
atomic.StoreInt32(countPtr, 0)
}
// Create a new user that will be added "by cluster-2"
// Create a remote user belonging to cluster-2
userFromCluster2 := th.CreateUser(t)
userFromCluster2.RemoteId = &clusters[1].RemoteId
userFromCluster2, appErr = th.App.UpdateUser(th.Context, userFromCluster2, false)
require.Nil(t, appErr)
_, _, appErr = th.App.AddUserToTeam(th.Context, team.Id, userFromCluster2.Id, th.BasicUser.Id)
require.Nil(t, appErr)

View file

@ -2917,11 +2917,17 @@ func (a *App) GetThreadsForUser(rctx request.CTX, userID, teamID string, options
result.Total = result.TotalUnreadThreads
}
list := &model.PostList{
Posts: make(map[string]*model.Post, len(result.Threads)),
}
for _, thread := range result.Threads {
a.sanitizeProfiles(thread.Participants, false)
thread.Post.SanitizeProps()
list.AddPost(thread.Post)
}
a.populatePostListTranslations(rctx, list)
return &result, nil
}
@ -2953,6 +2959,7 @@ func (a *App) GetThreadForUser(rctx request.CTX, threadMembership *model.ThreadM
a.sanitizeProfiles(thread.Participants, false)
thread.Post.SanitizeProps()
a.populatePostListTranslations(rctx, &model.PostList{Posts: map[string]*model.Post{thread.Post.Id: thread.Post}})
return thread, nil
}

View file

@ -5,6 +5,7 @@ package delete_expired_posts
import (
"encoding/json"
"time"
"github.com/mattermost/mattermost/server/public/model"
"github.com/mattermost/mattermost/server/public/shared/mlog"
@ -13,9 +14,16 @@ import (
"github.com/mattermost/mattermost/server/v8/channels/store"
)
const (
	// expiredPostsBatchSize is the number of expired burn-on-read post IDs
	// fetched from the temporary-post store per iteration of the cleanup job.
	expiredPostsBatchSize = 100
	// expiredPostsJobBatchWaitTime is the pause inserted between batches so
	// the cleanup job does not hammer the database in a tight loop.
	expiredPostsJobBatchWaitTime = 100 * time.Millisecond
)
// AppIface is the narrow slice of app-layer post operations available to the
// delete_expired_posts worker, decoupling it from the concrete *app.App.
// The worker's batch loop uses GetPostsByIds and
// PermanentDeletePostDataRetainStub; the other methods are part of the
// declared surface but are not exercised in this file's visible code.
type AppIface interface {
	DeletePost(rctx request.CTX, postID, deleteByID string) (*model.Post, *model.AppError)
	PermanentDeletePost(rctx request.CTX, postID, deleteByID string) *model.AppError
	PermanentDeletePostDataRetainStub(rctx request.CTX, post *model.Post, deleteByID string) *model.AppError
	GetSinglePost(rctx request.CTX, postID string, includeDeleted bool) (*model.Post, *model.AppError)
	GetPostsByIds(postIDs []string) ([]*model.Post, int64, *model.AppError)
}
func MakeWorker(jobServer *jobs.JobServer, store store.Store, app AppIface) *jobs.SimpleWorker {
@ -25,22 +33,41 @@ func MakeWorker(jobServer *jobs.JobServer, store store.Store, app AppIface) *job
return model.SafeDereference(cfg.ServiceSettings.EnableBurnOnRead)
}
execute := func(logger mlog.LoggerIFace, job *model.Job) error {
ids, err := store.TemporaryPost().GetExpiredPosts(request.EmptyContext(logger))
if err != nil {
return err
}
deletedPostIDs := make([]string, 0)
for _, id := range ids {
appErr := app.PermanentDeletePost(request.EmptyContext(logger), id, "")
if appErr != nil {
logger.Error("Failed to delete expired post", mlog.Err(appErr), mlog.String("post_id", id))
continue
}
deletedPostIDs = append(deletedPostIDs, id)
}
if job.Data == nil {
job.Data = make(model.StringMap)
}
deletedPostIDs := make([]string, 0)
lastPostId := ""
for {
time.Sleep(expiredPostsJobBatchWaitTime)
postIDs, err := store.TemporaryPost().GetExpiredPosts(request.EmptyContext(logger), lastPostId, expiredPostsBatchSize)
if err != nil {
return err
}
if len(postIDs) == 0 {
break
}
lastPostId = postIDs[len(postIDs)-1]
expiredPosts, _, appErr := app.GetPostsByIds(postIDs)
if appErr != nil {
logger.Error("Failed to get expired posts by IDs", mlog.Err(appErr))
return appErr
}
for _, post := range expiredPosts {
appErr = app.PermanentDeletePostDataRetainStub(request.EmptyContext(logger), post, "")
if appErr != nil {
logger.Error("Failed to delete expired post", mlog.Err(appErr), mlog.String("post_id", post.Id))
continue
}
deletedPostIDs = append(deletedPostIDs, post.Id)
}
}
deletedPostIDsJSON, err := json.Marshal(deletedPostIDs)
if err != nil {
logger.Error("Failed to marshal deleted post IDs", mlog.Err(err))

View file

@ -998,6 +998,27 @@ func (s *RetryLayerAutoTranslationStore) GetLatestPostUpdateAtForChannel(channel
}
// GetTranslationsSinceForChannel delegates to the wrapped store, retrying up
// to 3 times (with a 100ms pause between attempts) when the failure is
// classified as repeatable by isRepeatableError; any other error is returned
// immediately. Part of the generated retry layer.
func (s *RetryLayerAutoTranslationStore) GetTranslationsSinceForChannel(channelID string, dstLang string, since int64) (map[string]*model.Translation, error) {
	tries := 0
	for {
		result, err := s.AutoTranslationStore.GetTranslationsSinceForChannel(channelID, dstLang, since)
		if err == nil {
			return result, nil
		}
		// Non-repeatable errors are not worth retrying.
		if !isRepeatableError(err) {
			return result, err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return result, err
		}
		timepkg.Sleep(100 * timepkg.Millisecond)
	}
}
func (s *RetryLayerAutoTranslationStore) GetUserLanguage(userID string, channelID string) (string, error) {
tries := 0
@ -8726,6 +8747,27 @@ func (s *RetryLayerPostStore) PermanentDelete(rctx request.CTX, postID string) e
}
// PermanentDeleteAssociatedData delegates to the wrapped store, retrying up
// to 3 times (with a 100ms pause between attempts) when the failure is
// classified as repeatable by isRepeatableError; any other error is returned
// immediately. Part of the generated retry layer.
func (s *RetryLayerPostStore) PermanentDeleteAssociatedData(postIds []string) error {
	tries := 0
	for {
		err := s.PostStore.PermanentDeleteAssociatedData(postIds)
		if err == nil {
			return nil
		}
		// Non-repeatable errors are not worth retrying.
		if !isRepeatableError(err) {
			return err
		}
		tries++
		if tries >= 3 {
			err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
			return err
		}
		timepkg.Sleep(100 * timepkg.Millisecond)
	}
}
func (s *RetryLayerPostStore) PermanentDeleteBatch(endTime int64, limit int64) (int64, error) {
tries := 0
@ -14441,11 +14483,11 @@ func (s *RetryLayerTemporaryPostStore) Get(rctx request.CTX, id string, allowFro
}
func (s *RetryLayerTemporaryPostStore) GetExpiredPosts(rctx request.CTX) ([]string, error) {
func (s *RetryLayerTemporaryPostStore) GetExpiredPosts(rctx request.CTX, lastPostId string, limit uint64) ([]string, error) {
tries := 0
for {
result, err := s.TemporaryPostStore.GetExpiredPosts(rctx)
result, err := s.TemporaryPostStore.GetExpiredPosts(rctx, lastPostId, limit)
if err == nil {
return result, nil
}

View file

@ -50,6 +50,16 @@ var searchPostStoreTests = []searchTest{
Fn: testSearchNonLatinWords,
Tags: []string{EngineElasticSearch},
},
{
Name: "Should be able to search CJK words with substring matching",
Fn: testSearchCJKSubstringMatching,
Tags: []string{EnginePostgres},
},
{
Name: "Should be able to search CJK words in realistic sentences",
Fn: testSearchCJKAcceptanceCriteria,
Tags: []string{EnginePostgres, EngineElasticSearch},
},
{
Name: "Should be able to search for alternative spellings of words",
Fn: testSearchAlternativeSpellings,
@ -660,6 +670,286 @@ func testSearchNonLatinWords(t *testing.T, th *SearchTestHelper) {
})
}
// testSearchCJKSubstringMatching exercises Postgres-specific CJK search
// behavior: the LIKE-based implementation matches substrings anywhere in a
// message, which differs from Elasticsearch's token matching (this test is
// tagged EnginePostgres only in the test table).
func testSearchCJKSubstringMatching(t *testing.T, th *SearchTestHelper) {
	// Postgres LIKE-based CJK search does substring matching, so searching "你"
	// matches both "你" and "你好" (unlike Elasticsearch's token matching).
	// These tests verify Postgres-specific substring and operator behavior.
	t.Run("Should be able to search chinese words", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "你好", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "你", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "这是一个测试消息", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		t.Run("Should find single char as substring", func(t *testing.T) {
			// "你" is a substring of both "你好" and "你", so two hits.
			params := &model.SearchParams{Terms: "你"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 2)
			th.checkPostInSearchResults(t, p1.Id, results.Posts)
			th.checkPostInSearchResults(t, p2.Id, results.Posts)
		})

		t.Run("Should find multi-char term", func(t *testing.T) {
			// "你好" only appears whole in the first post.
			params := &model.SearchParams{Terms: "你好"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 1)
			th.checkPostInSearchResults(t, p1.Id, results.Posts)
		})

		t.Run("Should find substring in longer message", func(t *testing.T) {
			// "测试" occurs mid-sentence in p3.
			params := &model.SearchParams{Terms: "测试"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 1)
			th.checkPostInSearchResults(t, p3.Id, results.Posts)
		})

		t.Run("Should handle wildcard (no-op for LIKE)", func(t *testing.T) {
			// A trailing "*" adds nothing on top of LIKE substring matching,
			// so "你*" behaves exactly like "你".
			params := &model.SearchParams{Terms: "你*"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 2)
			th.checkPostInSearchResults(t, p1.Id, results.Posts)
			th.checkPostInSearchResults(t, p2.Id, results.Posts)
		})
	})

	t.Run("Should be able to search japanese words", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "こんにちは", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "カタカナ", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		t.Run("Should find hiragana substring", func(t *testing.T) {
			// "にちは" is an interior substring of "こんにちは".
			params := &model.SearchParams{Terms: "にちは"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 1)
			th.checkPostInSearchResults(t, p1.Id, results.Posts)
		})

		t.Run("Should find katakana", func(t *testing.T) {
			params := &model.SearchParams{Terms: "カタカナ"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 1)
			th.checkPostInSearchResults(t, p2.Id, results.Posts)
		})
	})

	t.Run("Should be able to search korean words", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "안녕하세요", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		t.Run("Should find hangul substring", func(t *testing.T) {
			// "안녕" is the leading substring of "안녕하세요".
			params := &model.SearchParams{Terms: "안녕"}
			results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
			require.NoError(t, err)

			require.Len(t, results.Posts, 1)
			th.checkPostInSearchResults(t, p1.Id, results.Posts)
		})
	})

	t.Run("Should handle excluded CJK terms", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "测试一", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		_, err = th.createPost(th.User.Id, th.ChannelBasic.Id, "测试二", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		// Both posts match "测试", but excluding "二" drops the second.
		params := &model.SearchParams{Terms: "测试", ExcludedTerms: "二"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)

		require.Len(t, results.Posts, 1)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
	})

	t.Run("Should handle OR search with CJK terms", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "苹果", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "香蕉", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		_, err = th.createPost(th.User.Id, th.ChannelBasic.Id, "西瓜", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		// OrTerms: either "苹果" or "香蕉" matches; "西瓜" matches neither.
		params := &model.SearchParams{Terms: "苹果 香蕉", OrTerms: true}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)

		require.Len(t, results.Posts, 2)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
	})

	t.Run("Should handle mixed CJK and Latin terms", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "hello 你好 world", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		_, err = th.createPost(th.User.Id, th.ChannelBasic.Id, "hello world", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		// AND semantics by default: both "hello" and "你好" must match.
		params := &model.SearchParams{Terms: "hello 你好"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)

		require.Len(t, results.Posts, 1)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
	})

	t.Run("Should handle quoted CJK phrases", func(t *testing.T) {
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "测试消息", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		_, err = th.createPost(th.User.Id, th.ChannelBasic.Id, "测试其他", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)

		// Quoted phrase search: only the exact phrase "测试消息" matches.
		params := &model.SearchParams{Terms: `"测试消息"`}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)

		require.Len(t, results.Posts, 1)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
	})
}
// testSearchCJKAcceptanceCriteria exercises CJK (Chinese/Japanese/Korean)
// post search against realistic, sentence-length message content, covering
// katakana, hiragana, and kanji terms embedded with and without surrounding
// whitespace.
func testSearchCJKAcceptanceCriteria(t *testing.T, th *SearchTestHelper) {
	// These tests verify CJK search works in realistic scenarios and should
	// pass on both Postgres (LIKE) and Elasticsearch (with CJK tokenizer).
	t.Run("Should find katakana term in business context sentences", func(t *testing.T) {
		// Three posts all contain the katakana word ビジネス ("business") at
		// different positions inside longer sentences.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "重要なビジネス環境では、信頼できるコミュニケーションが不可欠です。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "効果的なリカバリは、ビジネスを継続する鍵です。セルフホスト、プライベートクラウド、高可用性デプロイメントのサポートにより、機密性の高い環境での制御が可能になります。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "マターモストはビジネスチャットツールです。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "ビジネス"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		// All three posts must match, regardless of the term's position.
		require.Len(t, results.Posts, 3)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
	})
	t.Run("Should find company name in various sentence positions", func(t *testing.T) {
		// The kanji company name 富士通 appears at start, quoted mid-sentence,
		// and embedded directly after other kanji with no separator.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "富士通は「挑戦」「信頼」「共感」の価値観を大切にし、未知の課題に挑んできました。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "顧客は「富士通」様となります。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "現在富士通の官側NWから通話ができない問題について改めて調査を実施しています。", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "富士通"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		require.Len(t, results.Posts, 3)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
	})
	t.Run("Should find kanji embedded in text without spaces", func(t *testing.T) {
		// 検索 ("search") appears at the end, start, middle, and surrounded by
		// spaces — all four placements must be found.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキスト検索", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "検索テストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキスト検索テストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p4, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキスト 検索 テストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "検索"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		require.Len(t, results.Posts, 4)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
		th.checkPostInSearchResults(t, p4.Id, results.Posts)
	})
	t.Run("Should find hiragana embedded in text without spaces", func(t *testing.T) {
		// Same four placements as the kanji case, using hiragana けんさく.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストけんさく", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "けんさくテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストけんさくテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p4, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキスト けんさく テストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "けんさく"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		require.Len(t, results.Posts, 4)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
		th.checkPostInSearchResults(t, p4.Id, results.Posts)
	})
	t.Run("Should find fullwidth katakana embedded in text without spaces", func(t *testing.T) {
		// Fullwidth katakana ケンサク in the same four placements.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストケンサク", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "ケンサクテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストケンサクテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p4, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキスト ケンサク テストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "ケンサク"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		require.Len(t, results.Posts, 4)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
		th.checkPostInSearchResults(t, p4.Id, results.Posts)
	})
	t.Run("Should find halfwidth katakana embedded in text without spaces", func(t *testing.T) {
		// NOTE(review): these literals are labeled halfwidth but render the
		// same as the fullwidth ケンサク in the previous subtest — confirm the
		// actual code points are halfwidth (ｹﾝｻｸ, U+FF79..); if they are
		// fullwidth, this subtest duplicates the one above. Also note this
		// variant has no space-separated post, hence 3 expected results.
		p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストケンサク", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p2, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "ケンサクテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		p3, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "テストテキストケンサクテストテキスト", "", model.PostTypeDefault, 0, false)
		require.NoError(t, err)
		defer th.deleteUserPosts(th.User.Id)
		params := &model.SearchParams{Terms: "ケンサク"}
		results, err := th.Store.Post().SearchPostsForUser(th.Context, []*model.SearchParams{params}, th.User.Id, th.Team.Id, 0, 20)
		require.NoError(t, err)
		require.Len(t, results.Posts, 3)
		th.checkPostInSearchResults(t, p1.Id, results.Posts)
		th.checkPostInSearchResults(t, p2.Id, results.Posts)
		th.checkPostInSearchResults(t, p3.Id, results.Posts)
	})
}
func testSearchAlternativeSpellings(t *testing.T, th *SearchTestHelper) {
p1, err := th.createPost(th.User.Id, th.ChannelBasic.Id, "Straße test", "", model.PostTypeDefault, 0, false)
require.NoError(t, err)

View file

@ -233,7 +233,8 @@ func (s *SqlAutoTranslationStore) GetAllForObject(objectType, objectID string) (
Where(sq.Eq{"ObjectType": objectType, "ObjectId": objectID})
var translations []Translation
if err := s.GetReplica().SelectBuilder(&translations, query); err != nil {
// Use GetMaster to avoid replica lag issues when workers fetch queued items
if err := s.GetMaster().SelectBuilder(&translations, query); err != nil {
return nil, errors.Wrapf(err, "failed to get all translations for object_id=%s", objectID)
}
@ -434,3 +435,63 @@ func (s *SqlAutoTranslationStore) GetLatestPostUpdateAtForChannel(channelID stri
}
// InvalidatePostTranslationEtag is a deliberate no-op at the SQL layer: the
// SQL store holds no etag cache of its own.
// NOTE(review): presumably the cached etag lives in a higher caching layer
// that overrides this method — confirm against the cache-layer implementation.
func (s *SqlAutoTranslationStore) InvalidatePostTranslationEtag(channelID string) {}
// GetTranslationsSinceForChannel returns translations updated after `since`
// for posts in the given channel and destination language, keyed by post ID.
// Rows still in the processing state are excluded, and at most
// maxTranslationsPerChannelSync rows are returned per call.
func (s *SqlAutoTranslationStore) GetTranslationsSinceForChannel(channelID, dstLang string, since int64) (map[string]*model.Translation, error) {
	// Cap the result set so a channel with a huge backlog cannot produce an
	// unbounded response in a single call.
	const maxTranslationsPerChannelSync = 1000

	query := s.getQueryBuilder().
		Select("ObjectType", "ObjectId", "DstLang", "ProviderId", "NormHash", "Text", "Confidence", "Meta", "State", "UpdateAt").
		From("Translations").
		// PascalCase "ChannelId" for consistency with the other column
		// references in this store (identifier case folding makes this
		// equivalent to the previous lowercase "channelid").
		Where(sq.Eq{"ChannelId": channelID}).
		Where(sq.Eq{"DstLang": dstLang}).
		Where(sq.Eq{"ObjectType": model.TranslationObjectTypePost}).
		Where(sq.NotEq{"State": string(model.TranslationStateProcessing)}).
		Where(sq.Gt{"UpdateAt": since}).
		Limit(maxTranslationsPerChannelSync)

	var translations []Translation
	// NOTE(review): this reads from the replica while GetAllForObject was
	// switched to GetMaster for replica-lag reasons — confirm slightly stale
	// results are acceptable for this polling path.
	if err := s.GetReplica().SelectBuilder(&translations, query); err != nil {
		return nil, errors.Wrapf(err, "failed to get translations since for channel_id=%s dst_lang=%s", channelID, dstLang)
	}

	result := make(map[string]*model.Translation, len(translations))
	for _, row := range translations {
		meta, err := row.Meta.ToMap()
		if err != nil {
			// Best effort: skip rows whose Meta blob cannot be decoded rather
			// than failing the whole batch.
			continue
		}

		// The translation "type" (string vs object) is stored inside Meta; a
		// missing or non-string value falls back to the empty string.
		translationTypeStr, _ := meta["type"].(string)

		objectType := row.ObjectType
		if objectType == "" {
			// Empty ObjectType defaults to post.
			// NOTE(review): presumably rows written before ObjectType existed —
			// confirm.
			objectType = model.TranslationObjectTypePost
		}

		modelT := &model.Translation{
			ObjectID:   row.ObjectID,
			ObjectType: objectType,
			Lang:       row.DstLang,
			Type:       model.TranslationType(translationTypeStr),
			Confidence: row.Confidence,
			State:      model.TranslationState(row.State),
			NormHash:   row.NormHash,
			Meta:       meta,
			UpdateAt:   row.UpdateAt,
		}
		// Object translations carry serialized JSON in the Text column;
		// string translations carry the plain translated text.
		if modelT.Type == model.TranslationTypeObject {
			modelT.ObjectJSON = json.RawMessage(row.Text)
		} else {
			modelT.Text = row.Text
		}
		result[row.ObjectID] = modelT
	}
	return result, nil
}

View file

@ -1021,6 +1021,52 @@ func (s *SqlPostStore) permanentDelete(postIds []string) (err error) {
}
defer finalizeTransactionX(transaction, &err)
err = s.permanentDeleteAssociatedData(transaction, postIds)
if err != nil {
return err
}
query := s.getQueryBuilder().
Delete("Posts").
Where(sq.Eq{"Id": postIds})
if _, err = transaction.ExecBuilder(query); err != nil {
return errors.Wrap(err, "failed to delete Posts")
}
if err = transaction.Commit(); err != nil {
return errors.Wrap(err, "commit_transaction")
}
return nil
}
// PermanentDeleteAssociatedData permanently removes, in a single transaction,
// the data tied to the given post IDs:
//   - Threads
//   - Reactions
//   - Temporary Posts
//   - Read Receipts
//   - Thread replies if a post is a root post
func (s *SqlPostStore) PermanentDeleteAssociatedData(postIds []string) error {
	tx, err := s.GetMaster().Beginx()
	if err != nil {
		return errors.Wrap(err, "begin_transaction")
	}
	// Finalize cleans the transaction up if we return before Commit succeeds.
	defer finalizeTransactionX(tx, &err)

	if err = s.permanentDeleteAssociatedData(tx, postIds); err != nil {
		return err
	}
	if err = tx.Commit(); err != nil {
		return errors.Wrap(err, "commit_transaction")
	}
	return nil
}
func (s *SqlPostStore) permanentDeleteAssociatedData(transaction *sqlxTxWrapper, postIds []string) (err error) {
if err = s.permanentDeleteThreads(transaction, postIds); err != nil {
return err
}
@ -1039,20 +1085,12 @@ func (s *SqlPostStore) permanentDelete(postIds []string) (err error) {
query := s.getQueryBuilder().
Delete("Posts").
Where(
sq.Or{
sq.Eq{"Id": postIds},
sq.Eq{"RootId": postIds},
},
)
Where(sq.Eq{"RootId": postIds})
if _, err = transaction.ExecBuilder(query); err != nil {
return errors.Wrap(err, "failed to delete Posts")
}
if err = transaction.Commit(); err != nil {
return errors.Wrap(err, "commit_transaction")
}
return nil
}
@ -2042,6 +2080,69 @@ func (s *SqlPostStore) Search(teamId string, userId string, params *model.Search
return s.search(teamId, userId, params, true, true)
}
// splitCJKSearchTerms splits search terms for LIKE usage.
// It extracts quoted phrases as single terms, splits the remaining text by
// whitespace, and strips trailing wildcards (LIKE '%term%' is already
// bidirectional, so a trailing '*' adds nothing).
func splitCJKSearchTerms(input string) []string {
	var terms []string

	// Extract quoted phrases first so their contents stay intact as one term.
	quotes := quotedStringsRegex.FindAllStringIndex(input, -1)
	remaining := input
	offset := 0
	for _, loc := range quotes {
		// The match includes the surrounding quotes; take the inner content
		// and strip trailing wildcards BEFORE the emptiness check — otherwise
		// a phrase consisting only of '*' would append an empty term, which
		// becomes LIKE '%%' and matches every post.
		phrase := strings.TrimRight(input[loc[0]+1:loc[1]-1], "*")
		if phrase != "" {
			terms = append(terms, phrase)
		}
		// Blank out the quoted section from the remaining text, keeping a
		// single space so adjacent unquoted words do not merge; offset tracks
		// how much `remaining` has shrunk relative to `input`.
		remaining = remaining[:loc[0]-offset] + " " + remaining[loc[1]-offset:]
		offset += (loc[1] - loc[0]) - 1
	}

	// Split whatever is left (unquoted text) on whitespace.
	for word := range strings.FieldsSeq(remaining) {
		if word = strings.TrimRight(word, "*"); word != "" {
			terms = append(terms, word)
		}
	}
	return terms
}
// buildCJKSearchClause appends LIKE-based WHERE conditions for CJK search
// terms to baseQuery. Included terms are ORed together when orTerms is true
// and ANDed otherwise; excluded terms always become NOT LIKE conditions.
func (s *SqlPostStore) buildCJKSearchClause(baseQuery sq.SelectBuilder, searchType, terms, excludedTerms string, orTerms bool) sq.SelectBuilder {
	const escapeChar = "\\"

	// likePattern wraps a sanitized term in bidirectional wildcards.
	likePattern := func(term string) string {
		return "%" + sanitizeSearchTerm(term, escapeChar) + "%"
	}

	if terms != "" {
		parsed := splitCJKSearchTerms(terms)
		if orTerms {
			disjunction := sq.Or{}
			for _, term := range parsed {
				disjunction = append(disjunction, sq.Like{searchType: likePattern(term)})
			}
			if len(disjunction) > 0 {
				baseQuery = baseQuery.Where(disjunction)
			}
		} else {
			for _, term := range parsed {
				baseQuery = baseQuery.Where(sq.Like{searchType: likePattern(term)})
			}
		}
	}

	if excludedTerms != "" {
		for _, term := range splitCJKSearchTerms(excludedTerms) {
			baseQuery = baseQuery.Where(sq.NotLike{searchType: likePattern(term)})
		}
	}

	return baseQuery
}
func (s *SqlPostStore) search(teamId string, userId string, params *model.SearchParams, channelsByName bool, userByUsername bool) (*model.PostList, error) {
list := model.NewPostList()
if params.Terms == "" && params.ExcludedTerms == "" &&
@ -2089,6 +2190,16 @@ func (s *SqlPostStore) search(teamId string, userId string, params *model.Search
if terms == "" && excludedTerms == "" {
// we've already confirmed that we have a channel or user to search for
} else if s.getFeatureFlags().CJKSearch && (model.ContainsCJK(terms) || model.ContainsCJK(excludedTerms)) {
// CJK characters are not supported by PostgreSQL's to_tsvector/to_tsquery
// with the default English text search config. Fall back to LIKE matching.
//
// Why not pg_bigm? pg_bigm provides excellent CJK full-text search,
// but it requires installing a third-party C extension.
// Some managed PostgreSQL services do not support pg_bigm, so we cannot rely on it
// for all deployments. LIKE-based matching works with vanilla PostgreSQL.
// It also adds complexity as we would only need that index for CJK deployments.
baseQuery = s.buildCJKSearchClause(baseQuery, searchType, terms, excludedTerms, params.OrTerms)
} else {
// Parse text for wildcards
terms = wildCardRegex.ReplaceAllLiteralString(terms, ":* ")

View file

@ -142,6 +142,7 @@ type SqlStore struct {
pgDefaultTextSearchConfig string
skipMigrations bool
disableMorphLogging bool
featureFlagsFn func() *model.FeatureFlags
quitMonitor chan struct{}
wgMonitor *sync.WaitGroup
@ -161,6 +162,25 @@ func DisableMorphLogging() Option {
}
}
// WithFeatureFlags registers a callback used to look up the current feature
// flags, letting the store layer consult flags without depending on the full
// server config.
func WithFeatureFlags(fn func() *model.FeatureFlags) Option {
	return func(store *SqlStore) error {
		store.featureFlagsFn = fn
		return nil
	}
}
// getFeatureFlags returns the feature flags from the configured callback, or
// a freshly defaulted FeatureFlags value when no callback was provided.
func (ss *SqlStore) getFeatureFlags() *model.FeatureFlags {
	if fn := ss.featureFlagsFn; fn != nil {
		return fn()
	}
	defaults := &model.FeatureFlags{}
	defaults.SetDefaults()
	return defaults
}
func New(settings model.SqlSettings, logger mlog.LoggerIFace, metrics einterfaces.MetricsInterface, options ...Option) (*SqlStore, error) {
store := &SqlStore{
rrCounter: 0,

View file

@ -198,7 +198,9 @@ func initStores(logger mlog.LoggerIFace, parallelism int) {
for _, st := range storeTypes {
eg.Go(func() error {
var err error
st.SqlStore, err = New(*st.SqlSettings, logger, nil)
st.SqlStore, err = New(*st.SqlSettings, logger, nil, WithFeatureFlags(func() *model.FeatureFlags {
return &model.FeatureFlags{CJKSearch: true}
}))
if err != nil {
return err
}

View file

@ -147,13 +147,16 @@ func (s *SqlTemporaryPostStore) Delete(rctx request.CTX, id string) error {
return nil
}
func (s *SqlTemporaryPostStore) GetExpiredPosts(rctx request.CTX) ([]string, error) {
func (s *SqlTemporaryPostStore) GetExpiredPosts(rctx request.CTX, lastPostId string, limit uint64) ([]string, error) {
now := model.GetMillis()
query := s.getQueryBuilder().
Select("PostId").
From("TemporaryPosts").
Where(sq.LtOrEq{"ExpireAt": now})
OrderBy("PostId ASC").
Where(sq.LtOrEq{"ExpireAt": now}).
Where(sq.Gt{"PostId": lastPostId}).
Limit(limit)
ids := []string{}
err := s.GetMaster().SelectBuilder(&ids, query)

View file

@ -36,7 +36,9 @@ func NewTestPool(logger mlog.LoggerIFace, driverName string, poolSize int) (*Tes
for range poolSize {
eg.Go(func() error {
settings := storetest.MakeSqlSettings(driverName)
sqlStore, err := New(*settings, logger, nil)
sqlStore, err := New(*settings, logger, nil, WithFeatureFlags(func() *model.FeatureFlags {
return &model.FeatureFlags{CJKSearch: true}
}))
if err != nil {
return err
}

View file

@ -425,6 +425,7 @@ type PostStore interface {
// RefreshPostStats refreshes the various materialized views for admin console post stats.
RefreshPostStats() error
RestoreContentFlaggedPost(post *model.Post, statusFieldId, contentFlaggingManagedFieldId string) error
PermanentDeleteAssociatedData(postIds []string) error
}
type UserStore interface {
@ -1181,6 +1182,10 @@ type AutoTranslationStore interface {
// InvalidatePostTranslationEtag invalidates the cached post translation etag for a channel.
// This should be called after saving a new post translation.
InvalidatePostTranslationEtag(channelID string)
// GetTranslationsSinceForChannel returns translations updated after `since` for posts in the
// given channel and destination language. Only non-processing translations are returned.
// The result is keyed by post ID.
GetTranslationsSinceForChannel(channelID, dstLang string, since int64) (map[string]*model.Translation, error)
}
type ContentFlaggingStore interface {
@ -1206,7 +1211,7 @@ type TemporaryPostStore interface {
Save(rctx request.CTX, post *model.TemporaryPost) (*model.TemporaryPost, error)
Get(rctx request.CTX, id string, allowFromCache bool) (*model.TemporaryPost, error)
Delete(rctx request.CTX, id string) error
GetExpiredPosts(rctx request.CTX) ([]string, error)
GetExpiredPosts(rctx request.CTX, lastPostId string, limit uint64) ([]string, error)
}
// ChannelSearchOpts contains options for searching channels.

View file

@ -18,6 +18,7 @@ func TestAutoTranslationStore(t *testing.T, rctx request.CTX, ss store.Store, s
t.Run("IsUserEnabled", func(t *testing.T) { testAutoTranslationIsUserEnabled(t, rctx, ss) })
t.Run("GetUserLanguage", func(t *testing.T) { testAutoTranslationGetUserLanguage(t, rctx, ss) })
t.Run("GetActiveDestinationLanguages", func(t *testing.T) { testAutoTranslationGetActiveDestinationLanguages(t, rctx, ss) })
t.Run("GetAllForObject", func(t *testing.T) { testAutoTranslationGetAllForObject(t, ss) })
}
func testAutoTranslationIsUserEnabled(t *testing.T, rctx request.CTX, ss store.Store) {
@ -376,3 +377,72 @@ func testAutoTranslationGetActiveDestinationLanguages(t *testing.T, rctx request
assert.Contains(t, languages, "es")
})
}
// testAutoTranslationGetAllForObject verifies that GetAllForObject returns
// every translation saved for a given object and nothing belonging to others.
func testAutoTranslationGetAllForObject(t *testing.T, ss store.Store) {
	objectID := model.NewId()
	objectType := model.TranslationObjectTypePost

	// saveTranslation persists a ready string translation for the given object.
	saveTranslation := func(t *testing.T, id, lang, text string) {
		t.Helper()
		err := ss.AutoTranslation().Save(&model.Translation{
			ObjectID:   id,
			ObjectType: objectType,
			Lang:       lang,
			Provider:   "test",
			Type:       model.TranslationTypeString,
			Text:       text,
			State:      model.TranslationStateReady,
		})
		require.NoError(t, err)
	}

	t.Run("returns empty for nonexistent object", func(t *testing.T) {
		results, err := ss.AutoTranslation().GetAllForObject(objectType, model.NewId())
		require.NoError(t, err)
		assert.Empty(t, results)
	})

	t.Run("returns all translations for an object", func(t *testing.T) {
		// Save translations in two languages for the same object.
		saveTranslation(t, objectID, "es", "hola")
		saveTranslation(t, objectID, "fr", "bonjour")

		results, err := ss.AutoTranslation().GetAllForObject(objectType, objectID)
		require.NoError(t, err)
		require.Len(t, results, 2)

		langToText := make(map[string]string, len(results))
		for _, tr := range results {
			assert.Equal(t, objectID, tr.ObjectID)
			assert.Equal(t, objectType, tr.ObjectType)
			langToText[tr.Lang] = tr.Text
		}
		assert.Equal(t, "hola", langToText["es"])
		assert.Equal(t, "bonjour", langToText["fr"])
	})

	t.Run("does not return translations for other objects", func(t *testing.T) {
		// A translation for an unrelated object must not leak into results.
		saveTranslation(t, model.NewId(), "de", "hallo")

		results, err := ss.AutoTranslation().GetAllForObject(objectType, objectID)
		require.NoError(t, err)
		for _, tr := range results {
			assert.Equal(t, objectID, tr.ObjectID)
		}
	})
}

View file

@ -197,6 +197,36 @@ func (_m *AutoTranslationStore) GetLatestPostUpdateAtForChannel(channelID string
return r0, r1
}
// GetTranslationsSinceForChannel provides a mock function with given fields: channelID, dstLang, since
// NOTE(review): this mock is generated by mockery — regenerate rather than
// hand-editing.
func (_m *AutoTranslationStore) GetTranslationsSinceForChannel(channelID string, dstLang string, since int64) (map[string]*model.Translation, error) {
	ret := _m.Called(channelID, dstLang, since)

	if len(ret) == 0 {
		panic("no return value specified for GetTranslationsSinceForChannel")
	}

	var r0 map[string]*model.Translation
	var r1 error
	// A function-typed return value lets the test compute both results from
	// the call arguments.
	if rf, ok := ret.Get(0).(func(string, string, int64) (map[string]*model.Translation, error)); ok {
		return rf(channelID, dstLang, since)
	}
	if rf, ok := ret.Get(0).(func(string, string, int64) map[string]*model.Translation); ok {
		r0 = rf(channelID, dstLang, since)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(map[string]*model.Translation)
		}
	}

	if rf, ok := ret.Get(1).(func(string, string, int64) error); ok {
		r1 = rf(channelID, dstLang, since)
	} else {
		r1 = ret.Error(1)
	}

	return r0, r1
}
// GetUserLanguage provides a mock function with given fields: userID, channelID
func (_m *AutoTranslationStore) GetUserLanguage(userID string, channelID string) (string, error) {
ret := _m.Called(userID, channelID)

View file

@ -1137,6 +1137,24 @@ func (_m *PostStore) PermanentDelete(rctx request.CTX, postID string) error {
return r0
}
// PermanentDeleteAssociatedData provides a mock function with given fields: postIds
// NOTE(review): this mock is generated by mockery — regenerate rather than
// hand-editing.
func (_m *PostStore) PermanentDeleteAssociatedData(postIds []string) error {
	ret := _m.Called(postIds)

	if len(ret) == 0 {
		panic("no return value specified for PermanentDeleteAssociatedData")
	}

	var r0 error
	// A function-typed return value lets the test compute the error from the
	// call arguments.
	if rf, ok := ret.Get(0).(func([]string) error); ok {
		r0 = rf(postIds)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}
// PermanentDeleteBatch provides a mock function with given fields: endTime, limit
func (_m *PostStore) PermanentDeleteBatch(endTime int64, limit int64) (int64, error) {
ret := _m.Called(endTime, limit)

View file

@ -63,9 +63,9 @@ func (_m *TemporaryPostStore) Get(rctx request.CTX, id string, allowFromCache bo
return r0, r1
}
// GetExpiredPosts provides a mock function with given fields: rctx
func (_m *TemporaryPostStore) GetExpiredPosts(rctx request.CTX) ([]string, error) {
ret := _m.Called(rctx)
// GetExpiredPosts provides a mock function with given fields: rctx, lastPostId, limit
func (_m *TemporaryPostStore) GetExpiredPosts(rctx request.CTX, lastPostId string, limit uint64) ([]string, error) {
ret := _m.Called(rctx, lastPostId, limit)
if len(ret) == 0 {
panic("no return value specified for GetExpiredPosts")
@ -73,19 +73,19 @@ func (_m *TemporaryPostStore) GetExpiredPosts(rctx request.CTX) ([]string, error
var r0 []string
var r1 error
if rf, ok := ret.Get(0).(func(request.CTX) ([]string, error)); ok {
return rf(rctx)
if rf, ok := ret.Get(0).(func(request.CTX, string, uint64) ([]string, error)); ok {
return rf(rctx, lastPostId, limit)
}
if rf, ok := ret.Get(0).(func(request.CTX) []string); ok {
r0 = rf(rctx)
if rf, ok := ret.Get(0).(func(request.CTX, string, uint64) []string); ok {
r0 = rf(rctx, lastPostId, limit)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]string)
}
}
if rf, ok := ret.Get(1).(func(request.CTX) error); ok {
r1 = rf(rctx)
if rf, ok := ret.Get(1).(func(request.CTX, string, uint64) error); ok {
r1 = rf(rctx, lastPostId, limit)
} else {
r1 = ret.Error(1)
}

View file

@ -108,7 +108,7 @@ func testTemporaryPostDelete(t *testing.T, rctx request.CTX, ss store.Store) {
}
func testTemporaryPostGetExpiredPosts(t *testing.T, rctx request.CTX, ss store.Store) {
t.Run("should get expired posts", func(t *testing.T) {
t.Run("should get expired posts with pagination", func(t *testing.T) {
now := model.GetMillis()
pastTime := now - 3600000 // 1 hour ago
@ -132,10 +132,80 @@ func testTemporaryPostGetExpiredPosts(t *testing.T, rctx request.CTX, ss store.S
_, err = ss.TemporaryPost().Save(rctx, validPost)
require.NoError(t, err)
// Get expired posts
expiredPosts, err := ss.TemporaryPost().GetExpiredPosts(rctx)
// Get expired posts with empty lastPostId (first page)
expiredPosts, err := ss.TemporaryPost().GetExpiredPosts(rctx, "", 100)
require.NoError(t, err)
require.Equal(t, 1, len(expiredPosts))
require.Equal(t, expiredPost.ID, expiredPosts[0])
require.GreaterOrEqual(t, len(expiredPosts), 1)
require.Contains(t, expiredPosts, expiredPost.ID)
require.NotContains(t, expiredPosts, validPost.ID)
// Cleanup
_ = ss.TemporaryPost().Delete(rctx, expiredPost.ID)
_ = ss.TemporaryPost().Delete(rctx, validPost.ID)
})
t.Run("should return empty when no expired posts exist", func(t *testing.T) {
now := model.GetMillis()
// Create only non-expired posts
validPost := &model.TemporaryPost{
ID: model.NewId(),
Type: model.PostTypeDefault,
ExpireAt: now + 3600000, // 1 hour from now
Message: "Valid message",
}
_, err := ss.TemporaryPost().Save(rctx, validPost)
require.NoError(t, err)
expiredPosts, err := ss.TemporaryPost().GetExpiredPosts(rctx, "", 100)
require.NoError(t, err)
require.Empty(t, expiredPosts)
// Cleanup
_ = ss.TemporaryPost().Delete(rctx, validPost.ID)
})
t.Run("should respect limit and paginate using lastPostId cursor", func(t *testing.T) {
now := model.GetMillis()
pastTime := now - 3600000 // 1 hour ago
// Create multiple expired posts
expiredPostIDs := make([]string, 5)
for i := range 5 {
post := &model.TemporaryPost{
ID: model.NewId(),
Type: model.PostTypeDefault,
ExpireAt: pastTime,
Message: "Expired message",
}
_, err := ss.TemporaryPost().Save(rctx, post)
require.NoError(t, err)
expiredPostIDs[i] = post.ID
}
// Get first batch with limit of 2
firstBatch, err := ss.TemporaryPost().GetExpiredPosts(rctx, "", 2)
require.NoError(t, err)
require.Len(t, firstBatch, 2)
// Collect all expired posts through pagination
var allCollectedIDs []string
lastPostId := ""
for {
batch, err := ss.TemporaryPost().GetExpiredPosts(rctx, lastPostId, 2)
require.NoError(t, err)
if len(batch) == 0 {
break
}
allCollectedIDs = append(allCollectedIDs, batch...)
lastPostId = batch[len(batch)-1]
}
require.ElementsMatch(t, expiredPostIDs, allCollectedIDs)
// Cleanup
for _, id := range expiredPostIDs {
_ = ss.TemporaryPost().Delete(rctx, id)
}
})
}

View file

@ -900,6 +900,22 @@ func (s *TimerLayerAutoTranslationStore) GetLatestPostUpdateAtForChannel(channel
return result, err
}
// GetTranslationsSinceForChannel delegates to the wrapped store and, when
// metrics are configured, records the call duration and success under
// "AutoTranslationStore.GetTranslationsSinceForChannel".
// NOTE(review): timer layers are code-generated; regenerate rather than edit.
func (s *TimerLayerAutoTranslationStore) GetTranslationsSinceForChannel(channelID string, dstLang string, since int64) (map[string]*model.Translation, error) {
	start := time.Now()

	result, err := s.AutoTranslationStore.GetTranslationsSinceForChannel(channelID, dstLang, since)

	elapsed := float64(time.Since(start)) / float64(time.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("AutoTranslationStore.GetTranslationsSinceForChannel", success, elapsed)
	}
	return result, err
}
func (s *TimerLayerAutoTranslationStore) GetUserLanguage(userID string, channelID string) (string, error) {
start := time.Now()
@ -7010,6 +7026,22 @@ func (s *TimerLayerPostStore) PermanentDelete(rctx request.CTX, postID string) e
return err
}
// PermanentDeleteAssociatedData delegates to the wrapped PostStore and, when
// metrics are configured, records the call duration and success under
// "PostStore.PermanentDeleteAssociatedData".
// NOTE(review): timer layers are code-generated; regenerate rather than edit.
func (s *TimerLayerPostStore) PermanentDeleteAssociatedData(postIds []string) error {
	start := time.Now()

	err := s.PostStore.PermanentDeleteAssociatedData(postIds)

	elapsed := float64(time.Since(start)) / float64(time.Second)
	if s.Root.Metrics != nil {
		success := "false"
		if err == nil {
			success = "true"
		}
		s.Root.Metrics.ObserveStoreMethodDuration("PostStore.PermanentDeleteAssociatedData", success, elapsed)
	}
	return err
}
func (s *TimerLayerPostStore) PermanentDeleteBatch(endTime int64, limit int64) (int64, error) {
start := time.Now()
@ -11407,10 +11439,10 @@ func (s *TimerLayerTemporaryPostStore) Get(rctx request.CTX, id string, allowFro
return result, err
}
func (s *TimerLayerTemporaryPostStore) GetExpiredPosts(rctx request.CTX) ([]string, error) {
func (s *TimerLayerTemporaryPostStore) GetExpiredPosts(rctx request.CTX, lastPostId string, limit uint64) ([]string, error) {
start := time.Now()
result, err := s.TemporaryPostStore.GetExpiredPosts(rctx)
result, err := s.TemporaryPostStore.GetExpiredPosts(rctx, lastPostId, limit)
elapsed := float64(time.Since(start)) / float64(time.Second)
if s.Root.Metrics != nil {

View file

@ -436,6 +436,7 @@ func completeOAuth(c *Context, w http.ResponseWriter, r *http.Request) {
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
model.SessionCookieToken: c.AppContext.Session().Token,
model.SessionCookieCsrf: c.AppContext.Session().GetCSRF(),
"srv": c.App.GetSiteURL(), // Server URL for mobile client verification
})
utils.RenderMobileAuthComplete(w, redirectURL)

View file

@ -853,6 +853,59 @@ func (th *TestHelper) AddPermissionToRole(tb testing.TB, permission string, role
require.Nil(tb, appErr)
}
// TestOAuthMobileCallbackIncludesSrvParameter verifies that mobile OAuth callbacks
// include the 'srv' parameter for origin verification
//
// NOTE(review): none of these subtests call completeOAuth or exercise an HTTP
// handler — each builds local values and then asserts on those same values,
// so they are tautological and would still pass if the server code regressed.
// Consider replacing them with a handler-level test that inspects the actual
// redirect URL produced by completeOAuth.
func TestOAuthMobileCallbackIncludesSrvParameter(t *testing.T) {
	// The 'srv' parameter is added to mobile callbacks to allow the client
	// to verify the server origin
	t.Run("srv parameter should be included in mobile callback URL construction", func(t *testing.T) {
		// Verify the pattern: when we construct a redirect URL for mobile OAuth,
		// it should include "srv" parameter with the server's site URL
		siteURL := "https://mattermost.example.com"
		sessionToken := "test-session-token"
		csrfToken := "test-csrf-token"

		// Simulate what the code does when constructing the callback
		// NOTE(review): this only mirrors the production map literal; it does
		// not invoke the production code path.
		params := map[string]string{
			model.SessionCookieToken: sessionToken,
			model.SessionCookieCsrf:  csrfToken,
			"srv":                    siteURL,
		}

		// Verify all expected parameters are present
		assert.Equal(t, sessionToken, params[model.SessionCookieToken])
		assert.Equal(t, csrfToken, params[model.SessionCookieCsrf])
		assert.Equal(t, siteURL, params["srv"])
	})

	t.Run("srv parameter detects OAuth server mismatch", func(t *testing.T) {
		// Scenario: The srv parameter from callback doesn't match expected server
		// Mobile should detect the mismatch
		expectedServer := "https://server-a.example.com"
		actualSrvFromCallback := "https://server-b.example.com"

		// This is the check that should happen in mobile
		// NOTE(review): documents the intended mobile-side check; it asserts
		// on two local constants, not on server behavior.
		isMismatch := expectedServer != actualSrvFromCallback
		assert.True(t, isMismatch, "Should detect server mismatch")
	})

	t.Run("srv parameter allows legitimate OAuth login", func(t *testing.T) {
		// Scenario: Normal OAuth login to legitimate.com
		// Server adds srv=legitimate.com to callback
		// Mobile verifies: expected (legitimate.com) == srv (legitimate.com)
		expectedServer := "https://legitimate.example.com"
		actualSrvFromCallback := "https://legitimate.example.com"

		// This is the check that should happen in mobile
		isLegitimate := expectedServer == actualSrvFromCallback
		assert.True(t, isLegitimate, "Should allow legitimate OAuth login")
	})
}
func TestFullyQualifiedRedirectURL(t *testing.T) {
const siteURL = "https://xxx.yyy/mm"

View file

@ -261,6 +261,7 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
"login_code": code.Token,
"srv": c.App.GetSiteURL(), // Server URL for mobile client verification
})
utils.RenderMobileAuthComplete(w, redirectURL)
return
@ -281,13 +282,12 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
if hasRedirectURL {
if isMobile {
// Mobile clients with redirect url support
// Legacy mobile path: return tokens only when SAML code exchange was not requested
if samlChallenge == "" {
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
model.SessionCookieToken: c.AppContext.Session().Token,
model.SessionCookieCsrf: c.AppContext.Session().GetCSRF(),
})
}
// Always add tokens for mobile in legacy path (we only reach here if code-exchange was skipped)
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
model.SessionCookieToken: c.AppContext.Session().Token,
model.SessionCookieCsrf: c.AppContext.Session().GetCSRF(),
"srv": c.App.GetSiteURL(), // Server URL for mobile client verification (config-based, not request Host)
})
utils.RenderMobileAuthComplete(w, redirectURL)
} else {
http.Redirect(w, r, redirectURL, http.StatusFound)

View file

@ -0,0 +1,88 @@
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package web
import (
"encoding/base64"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/mattermost/mattermost/server/public/model"
)
// TestSamlCallbackIncludesSrvParameter verifies that mobile SAML callbacks
// include the 'srv' parameter for origin verification
func TestSamlCallbackIncludesSrvParameter(t *testing.T) {
	// The 'srv' parameter is added to mobile callbacks so the client can
	// verify which server issued the callback.
	t.Run("srv parameter should be included in redirect URL construction", func(t *testing.T) {
		// Pattern under test: a mobile redirect URL is built from a parameter
		// map that must carry the session token, CSRF token, and "srv".
		const (
			serverURL = "https://mattermost.example.com"
			token     = "test-session-token"
			csrf      = "test-csrf-token"
		)

		// Mirror the parameter map the server constructs for the callback.
		callbackParams := map[string]string{
			model.SessionCookieToken: token,
			model.SessionCookieCsrf:  csrf,
			"srv":                    serverURL,
		}

		// Every expected parameter must be present with the right value.
		for key, want := range map[string]string{
			model.SessionCookieToken: token,
			model.SessionCookieCsrf:  csrf,
			"srv":                    serverURL,
		} {
			assert.Equal(t, want, callbackParams[key])
		}
	})

	t.Run("srv parameter detects server mismatch", func(t *testing.T) {
		// Scenario: the srv value in the callback differs from the server the
		// mobile client expected, so the client should detect a mismatch.
		expected := "https://server-a.example.com"
		fromCallback := "https://server-b.example.com"

		// This is the comparison the mobile client is expected to perform.
		assert.True(t, expected != fromCallback, "Should detect server mismatch")
	})

	t.Run("srv parameter allows legitimate login", func(t *testing.T) {
		// Scenario: normal login — the server adds srv=<its own URL> to the
		// callback and the mobile client sees expected == srv.
		expected := "https://server.example.com"
		fromCallback := "https://server.example.com"

		// This is the comparison the mobile client is expected to perform.
		assert.True(t, expected == fromCallback, "Should allow legitimate login")
	})
}
// TestCompleteSamlRelayState tests that relay state is properly handled
func TestCompleteSamlRelayState(t *testing.T) {
	t.Run("should decode relay state correctly", func(t *testing.T) {
		// Build a relay state the same way the server does: JSON-encode the
		// relay props, then base64 the JSON.
		props := map[string]string{
			"action":      model.OAuthActionMobile,
			"redirect_to": "mmauth://callback",
		}
		encoded := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(props)))

		// Round-trip: decoding must succeed and yield the original props.
		raw, err := base64.StdEncoding.DecodeString(encoded)
		require.NoError(t, err)

		roundTripped := model.MapFromJSON(strings.NewReader(string(raw)))
		assert.Equal(t, model.OAuthActionMobile, roundTripped["action"])
		assert.Equal(t, "mmauth://callback", roundTripped["redirect_to"])
	})
}

View file

@ -51,6 +51,7 @@ type Client interface {
GetGroupsByChannel(ctx context.Context, channelID string, groupOpts model.GroupSearchOpts) ([]*model.GroupWithSchemeAdmin, int, *model.Response, error)
GetGroupsByTeam(ctx context.Context, teamID string, groupOpts model.GroupSearchOpts) ([]*model.GroupWithSchemeAdmin, int, *model.Response, error)
RestoreGroup(ctx context.Context, groupID string, etag string) (*model.Group, *model.Response, error)
GetOldClientLicense(ctx context.Context, etag string) (map[string]string, *model.Response, error)
UploadLicenseFile(ctx context.Context, data []byte) (*model.Response, error)
RemoveLicenseFile(ctx context.Context) (*model.Response, error)
GetLogs(ctx context.Context, page, perPage int) ([]string, *model.Response, error)

View file

@ -249,10 +249,15 @@ func downloadFile(path string, downloadFn func(*os.File) (string, error), retrie
return "", fmt.Errorf("%s file already exists", fileType)
case err != nil:
// file does not exist, we create it
outFile, err = os.Create(path)
outFile, err = os.OpenFile(path, os.O_WRONLY|os.O_CREATE, 0600)
createdFile = true
default:
// no error, file exists, we open it
// no error, file exists, we double check the permissions and then open it
permErr := os.Chmod(path, 0600)
if permErr != nil {
return "", fmt.Errorf("failed to change permissions on output file: %w", permErr)
}
outFile, err = os.OpenFile(path, os.O_WRONLY, 0600)
}

View file

@ -5,6 +5,7 @@ package commands
import (
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
@ -256,6 +257,10 @@ func (s *MmctlE2ETestSuite) TestExportDownloadCmdF() {
s.Require().Len(printer.GetLines(), 1)
s.Require().True(strings.HasPrefix(printer.GetLines()[0].(string), "Export file downloaded to "))
s.Require().Empty(printer.GetErrorLines())
info, err := os.Stat(downloadPath)
s.Require().Nil(err)
s.Require().Equal(fs.FileMode(0600), info.Mode().Perm(), fmt.Sprintf("expected %o, got %o", fs.FileMode(0600), info.Mode().Perm()))
})
s.RunForSystemAdminAndLocal("MM-T3842 - full download", func(c client.Client) {
@ -285,6 +290,10 @@ func (s *MmctlE2ETestSuite) TestExportDownloadCmdF() {
s.Require().Nil(err)
s.Require().Equal(expected, actual)
info, err := os.Stat(downloadPath)
s.Require().Nil(err)
s.Require().Equal(fs.FileMode(0600), info.Mode().Perm(), fmt.Sprintf("expected %o, got %o", fs.FileMode(0600), info.Mode().Perm()))
})
}

View file

@ -7,6 +7,8 @@ import (
"context"
"errors"
"os"
"strconv"
"time"
"github.com/mattermost/mattermost/server/v8/cmd/mmctl/client"
"github.com/mattermost/mattermost/server/v8/cmd/mmctl/printer"
@ -43,10 +45,19 @@ var RemoveLicenseCmd = &cobra.Command{
RunE: withClient(removeLicenseCmdF),
}
// GetLicenseCmd is the `license get` subcommand; it fetches the current
// server license via getLicenseCmdF and prints it.
var GetLicenseCmd = &cobra.Command{
	Use:     "get",
	Short:   "Get the current license.",
	Long:    "Get the current server license and print it.",
	Example: "  license get",
	RunE:    withClient(getLicenseCmdF),
}
func init() {
LicenseCmd.AddCommand(UploadLicenseCmd)
LicenseCmd.AddCommand(RemoveLicenseCmd)
LicenseCmd.AddCommand(UploadLicenseStringCmd)
LicenseCmd.AddCommand(GetLicenseCmd)
RootCmd.AddCommand(LicenseCmd)
}
@ -94,3 +105,41 @@ func removeLicenseCmdF(c client.Client, cmd *cobra.Command, args []string) error
return nil
}
// getLicenseCmdF implements `license get`: it fetches the license map from
// the old client-license endpoint and prints it, adding human-readable
// timestamp fields alongside the raw epoch values.
func getLicenseCmdF(c client.Client, cmd *cobra.Command, args []string) error {
	lic, _, err := c.GetOldClientLicense(context.TODO(), "")
	if err != nil {
		return err
	}

	// An unlicensed server still returns a map; "IsLicensed" is the flag.
	if lic["IsLicensed"] != "true" {
		printer.Print("No license installed")
		return nil
	}

	// Derive human-readable companions for each epoch-millisecond field.
	for _, field := range []string{"StartsAt", "ExpiresAt", "IssuedAt"} {
		lic[field+"Readable"] = formatLicenseTimestamp(lic[field])
	}

	printer.PrintT(`License ID: {{.Id}}
Starts At: {{.StartsAtReadable}}
Expires At: {{.ExpiresAtReadable}}
Users: {{.Users}}
SKU: {{.SkuShortName}}
Is Trial: {{.IsTrial}}
Issued At: {{.IssuedAtReadable}}
Name: {{.Name}}
Company: {{.Company}}`, lic)

	return nil
}
// formatLicenseTimestamp converts an epoch milliseconds string to a
// human-readable RFC 3339 date in UTC. If the input is not a valid integer,
// the original string is returned unchanged as a best-effort fallback.
func formatLicenseTimestamp(epochMs string) string {
	if ms, err := strconv.ParseInt(epochMs, 10, 64); err == nil {
		return time.UnixMilli(ms).UTC().Format(time.RFC3339)
	}
	return epochMs
}

View file

@ -122,3 +122,77 @@ func (s *MmctlUnitTestSuite) TestUploadLicenseStringCmdF() {
s.Require().EqualError(err, "enter one license file to upload")
})
}
// TestGetLicenseCmdF covers the `license get` command: successful fetch,
// no license installed, and a client error.
func (s *MmctlUnitTestSuite) TestGetLicenseCmdF() {
	s.Run("Get license successfully", func() {
		printer.Clean()

		license := map[string]string{
			"Id":           "test-license-id",
			"IsLicensed":   "true",
			"Company":      "Test Company",
			"Name":         "Test Contact",
			"Email":        "test@example.com",
			"SkuShortName": "enterprise",
			"Users":        "100",
			"IsTrial":      "false",
			"IssuedAt":     "1609459200000",
			"StartsAt":     "1609459200000",
			"ExpiresAt":    "1640995200000",
		}

		s.client.
			EXPECT().
			GetOldClientLicense(context.TODO(), "").
			Return(license, &model.Response{StatusCode: http.StatusOK}, nil).
			Times(1)

		err := getLicenseCmdF(s.client, &cobra.Command{}, []string{})
		s.Require().Nil(err)
		s.Require().Len(printer.GetLines(), 1)
		s.Require().Len(printer.GetErrorLines(), 0)

		// The command adds human-readable timestamp fields to the output.
		printed := printer.GetLines()[0].(map[string]string)
		s.Equal("2021-01-01T00:00:00Z", printed["IssuedAtReadable"])
		s.Equal("2021-01-01T00:00:00Z", printed["StartsAtReadable"])
		s.Equal("2022-01-01T00:00:00Z", printed["ExpiresAtReadable"])
		s.Equal("Test Company", printed["Company"])
		s.Equal("enterprise", printed["SkuShortName"])
	})

	s.Run("No license installed", func() {
		printer.Clean()

		s.client.
			EXPECT().
			GetOldClientLicense(context.TODO(), "").
			Return(map[string]string{"IsLicensed": "false"}, &model.Response{StatusCode: http.StatusOK}, nil).
			Times(1)

		err := getLicenseCmdF(s.client, &cobra.Command{}, []string{})
		s.Require().Nil(err)
		s.Require().Len(printer.GetLines(), 1)
		s.Require().Equal(printer.GetLines()[0], "No license installed")
	})

	s.Run("Fail to get license", func() {
		printer.Clean()

		expectedErr := errors.New("mock error")
		s.client.
			EXPECT().
			GetOldClientLicense(context.TODO(), "").
			Return(nil, &model.Response{StatusCode: http.StatusInternalServerError}, expectedErr).
			Times(1)

		err := getLicenseCmdF(s.client, &cobra.Command{}, []string{})
		s.Require().NotNil(err)
		s.Require().Equal(err, expectedErr)
		s.Require().Len(printer.GetLines(), 0)
	})
}

Some files were not shown because too many files have changed in this diff Show more