Diffstat (limited to 'store/sqlstore')
-rw-r--r--  store/sqlstore/audit_store.go | 146
-rw-r--r--  store/sqlstore/audit_store_test.go | 90
-rw-r--r--  store/sqlstore/channel_store.go | 1664
-rw-r--r--  store/sqlstore/channel_store_test.go | 1962
-rw-r--r--  store/sqlstore/cluster_discovery_store.go | 229
-rw-r--r--  store/sqlstore/cluster_discovery_store_test.go | 202
-rw-r--r--  store/sqlstore/command_store.go | 226
-rw-r--r--  store/sqlstore/command_store_test.go | 221
-rw-r--r--  store/sqlstore/command_webhook_store.go | 125
-rw-r--r--  store/sqlstore/command_webhook_store_test.go | 65
-rw-r--r--  store/sqlstore/compliance_store.go | 267
-rw-r--r--  store/sqlstore/compliance_store_test.go | 318
-rw-r--r--  store/sqlstore/emoji_store.go | 212
-rw-r--r--  store/sqlstore/emoji_store_test.go | 176
-rw-r--r--  store/sqlstore/file_info_store.go | 317
-rw-r--r--  store/sqlstore/file_info_store_test.go | 300
-rw-r--r--  store/sqlstore/job_store.go | 349
-rw-r--r--  store/sqlstore/job_store_test.go | 407
-rw-r--r--  store/sqlstore/license_store.go | 83
-rw-r--r--  store/sqlstore/license_store_test.go | 55
-rw-r--r--  store/sqlstore/oauth_store.go | 570
-rw-r--r--  store/sqlstore/oauth_store_test.go | 446
-rw-r--r--  store/sqlstore/post_store.go | 1393
-rw-r--r--  store/sqlstore/post_store_test.go | 1703
-rw-r--r--  store/sqlstore/preference_store.go | 427
-rw-r--r--  store/sqlstore/preference_store_test.go | 517
-rw-r--r--  store/sqlstore/reaction_store_test.go | 352
-rw-r--r--  store/sqlstore/session_store.go | 349
-rw-r--r--  store/sqlstore/session_store_test.go | 258
-rw-r--r--  store/sqlstore/status_store.go | 245
-rw-r--r--  store/sqlstore/status_store_test.go | 105
-rw-r--r--  store/sqlstore/store.go | 87
-rw-r--r--  store/sqlstore/store_test.go | 151
-rw-r--r--  store/sqlstore/supplier.go | 884
-rw-r--r--  store/sqlstore/supplier_reactions.go | 213
-rw-r--r--  store/sqlstore/system_store.go | 137
-rw-r--r--  store/sqlstore/system_store_test.go | 57
-rw-r--r--  store/sqlstore/team_store.go | 837
-rw-r--r--  store/sqlstore/team_store_test.go | 1028
-rw-r--r--  store/sqlstore/tokens_store.go | 110
-rw-r--r--  store/sqlstore/upgrade.go | 310
-rw-r--r--  store/sqlstore/upgrade_test.go | 41
-rw-r--r--  store/sqlstore/user_access_token_store.go | 263
-rw-r--r--  store/sqlstore/user_access_token_store_test.go | 87
-rw-r--r--  store/sqlstore/user_store.go | 1620
-rw-r--r--  store/sqlstore/user_store_test.go | 2020
-rw-r--r--  store/sqlstore/webhook_store.go | 573
-rw-r--r--  store/sqlstore/webhook_store_test.go | 516
48 files changed, 22713 insertions, 0 deletions
diff --git a/store/sqlstore/audit_store.go b/store/sqlstore/audit_store.go
new file mode 100644
index 000000000..4910db79b
--- /dev/null
+++ b/store/sqlstore/audit_store.go
@@ -0,0 +1,146 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlAuditStore struct {
+ SqlStore
+}
+
+func NewSqlAuditStore(sqlStore SqlStore) store.AuditStore {
+ s := &SqlAuditStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Audit{}, "Audits").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("Action").SetMaxSize(512)
+ table.ColMap("ExtraInfo").SetMaxSize(1024)
+ table.ColMap("IpAddress").SetMaxSize(64)
+ table.ColMap("SessionId").SetMaxSize(26)
+ }
+
+ return s
+}
+
+func (s SqlAuditStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_audits_user_id", "Audits", "UserId")
+}
+
+func (s SqlAuditStore) Save(audit *model.Audit) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ audit.Id = model.NewId()
+ audit.CreateAt = model.GetMillis()
+
+ if err := s.GetMaster().Insert(audit); err != nil {
+ result.Err = model.NewAppError("SqlAuditStore.Save", "store.sql_audit.save.saving.app_error", nil, "user_id="+audit.UserId+" action="+audit.Action, http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlAuditStore) Get(user_id string, offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if limit > 1000 {
+ limit = 1000
+ result.Err = model.NewAppError("SqlAuditStore.Get", "store.sql_audit.get.limit.app_error", nil, "user_id="+user_id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ query := "SELECT * FROM Audits"
+
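+ // An empty user_id returns audit entries for all users.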
+ if len(user_id) != 0 {
+ query += " WHERE UserId = :user_id"
+ }
+
+ query += " ORDER BY CreateAt DESC LIMIT :limit OFFSET :offset"
+
+ var audits model.Audits
+ if _, err := s.GetReplica().Select(&audits, query, map[string]interface{}{"user_id": user_id, "limit": limit, "offset": offset}); err != nil {
+ result.Err = model.NewAppError("SqlAuditStore.Get", "store.sql_audit.get.finding.app_error", nil, "user_id="+user_id, http.StatusInternalServerError)
+ } else {
+ result.Data = audits
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlAuditStore) PermanentDeleteByUser(userId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Audits WHERE UserId = :userId",
+ map[string]interface{}{"userId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlAuditStore.Delete", "store.sql_audit.permanent_delete_by_user.app_error", nil, "user_id="+userId, http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlAuditStore) PermanentDeleteBatch(endTime int64, limit int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var query string
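+ // Postgres does not support DELETE ... LIMIT, so the batch is bounded with a subquery on Id; MySQL applies LIMIT to the DELETE directly.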
+ if *utils.Cfg.SqlSettings.DriverName == "postgres" {
+ query = "DELETE from Audits WHERE Id = any (array (SELECT Id FROM Audits WHERE CreateAt < :EndTime LIMIT :Limit))"
+ } else {
+ query = "DELETE from Audits WHERE CreateAt < :EndTime LIMIT :Limit"
+ }
+
+ sqlResult, err := s.GetMaster().Exec(query, map[string]interface{}{"EndTime": endTime, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlAuditStore.PermanentDeleteBatch", "store.sql_audit.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ } else {
+ rowsAffected, err1 := sqlResult.RowsAffected()
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlAuditStore.PermanentDeleteBatch", "store.sql_audit.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ result.Data = int64(0)
+ } else {
+ result.Data = rowsAffected
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/audit_store_test.go b/store/sqlstore/audit_store_test.go
new file mode 100644
index 000000000..4378c13e5
--- /dev/null
+++ b/store/sqlstore/audit_store_test.go
@@ -0,0 +1,90 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+func TestSqlAuditStore(t *testing.T) {
+ ss := Setup()
+
+ audit := &model.Audit{UserId: model.NewId(), IpAddress: "ipaddress", Action: "Action"}
+ store.Must(ss.Audit().Save(audit))
+ time.Sleep(100 * time.Millisecond)
+ store.Must(ss.Audit().Save(audit))
+ time.Sleep(100 * time.Millisecond)
+ store.Must(ss.Audit().Save(audit))
+ time.Sleep(100 * time.Millisecond)
+ audit.ExtraInfo = "extra"
+ time.Sleep(100 * time.Millisecond)
+ store.Must(ss.Audit().Save(audit))
+
+ time.Sleep(100 * time.Millisecond)
+
+ c := ss.Audit().Get(audit.UserId, 0, 100)
+ result := <-c
+ audits := result.Data.(model.Audits)
+
+ if len(audits) != 4 {
+ t.Fatal("Failed to save and retrieve 4 audit logs")
+ }
+
+ if audits[0].ExtraInfo != "extra" {
+ t.Fatal("Failed to save property for extra info")
+ }
+
+ c = ss.Audit().Get("missing", 0, 100)
+ result = <-c
+ audits = result.Data.(model.Audits)
+
+ if len(audits) != 0 {
+ t.Fatal("Should have returned empty because user_id is missing")
+ }
+
+ c = ss.Audit().Get("", 0, 100)
+ result = <-c
+ audits = result.Data.(model.Audits)
+
+ if len(audits) < 4 {
+ t.Fatal("Failed to save and retrieve 4 audit logs")
+ }
+
+ if r2 := <-ss.Audit().PermanentDeleteByUser(audit.UserId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+}
+
+func TestAuditStorePermanentDeleteBatch(t *testing.T) {
+ ss := Setup()
+
+ a1 := &model.Audit{UserId: model.NewId(), IpAddress: "ipaddress", Action: "Action"}
+ store.Must(ss.Audit().Save(a1))
+ time.Sleep(10 * time.Millisecond)
+ a2 := &model.Audit{UserId: a1.UserId, IpAddress: "ipaddress", Action: "Action"}
+ store.Must(ss.Audit().Save(a2))
+ time.Sleep(10 * time.Millisecond)
+ cutoff := model.GetMillis()
+ time.Sleep(10 * time.Millisecond)
+ a3 := &model.Audit{UserId: a1.UserId, IpAddress: "ipaddress", Action: "Action"}
+ store.Must(ss.Audit().Save(a3))
+
+ if r := <-ss.Audit().Get(a1.UserId, 0, 100); len(r.Data.(model.Audits)) != 3 {
+ t.Fatal("Expected 3 audits. Got ", len(r.Data.(model.Audits)))
+ }
+
+ store.Must(ss.Audit().PermanentDeleteBatch(cutoff, 1000000))
+
+ if r := <-ss.Audit().Get(a1.UserId, 0, 100); len(r.Data.(model.Audits)) != 1 {
+ t.Fatal("Expected 1 audit. Got ", len(r.Data.(model.Audits)))
+ }
+
+ if r2 := <-ss.Audit().PermanentDeleteByUser(a1.UserId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+}
diff --git a/store/sqlstore/channel_store.go b/store/sqlstore/channel_store.go
new file mode 100644
index 000000000..06cca9e88
--- /dev/null
+++ b/store/sqlstore/channel_store.go
@@ -0,0 +1,1664 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ ALL_CHANNEL_MEMBERS_FOR_USER_CACHE_SIZE = model.SESSION_CACHE_SIZE
+ ALL_CHANNEL_MEMBERS_FOR_USER_CACHE_SEC = 900 // 15 mins
+
+ ALL_CHANNEL_MEMBERS_NOTIFY_PROPS_FOR_CHANNEL_CACHE_SIZE = model.SESSION_CACHE_SIZE
+ ALL_CHANNEL_MEMBERS_NOTIFY_PROPS_FOR_CHANNEL_CACHE_SEC = 1800 // 30 mins
+
+ CHANNEL_MEMBERS_COUNTS_CACHE_SIZE = model.CHANNEL_CACHE_SIZE
+ CHANNEL_MEMBERS_COUNTS_CACHE_SEC = 1800 // 30 mins
+
+ CHANNEL_CACHE_SEC = 900 // 15 mins
+)
+
+type SqlChannelStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
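+ // Package-level LRU caches for hot channel lookups; entries expire after the *_CACHE_SEC values above and are cleared by ClearChannelCaches or the Invalidate* methods.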
+var channelMemberCountsCache = utils.NewLru(CHANNEL_MEMBERS_COUNTS_CACHE_SIZE)
+var allChannelMembersForUserCache = utils.NewLru(ALL_CHANNEL_MEMBERS_FOR_USER_CACHE_SIZE)
+var allChannelMembersNotifyPropsForChannelCache = utils.NewLru(ALL_CHANNEL_MEMBERS_NOTIFY_PROPS_FOR_CHANNEL_CACHE_SIZE)
+var channelCache = utils.NewLru(model.CHANNEL_CACHE_SIZE)
+var channelByNameCache = utils.NewLru(model.CHANNEL_CACHE_SIZE)
+
+func ClearChannelCaches() {
+ channelMemberCountsCache.Purge()
+ allChannelMembersForUserCache.Purge()
+ allChannelMembersNotifyPropsForChannelCache.Purge()
+ channelCache.Purge()
+ channelByNameCache.Purge()
+}
+
+func NewSqlChannelStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.ChannelStore {
+ s := &SqlChannelStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Channel{}, "Channels").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("TeamId").SetMaxSize(26)
+ table.ColMap("Type").SetMaxSize(1)
+ table.ColMap("DisplayName").SetMaxSize(64)
+ table.ColMap("Name").SetMaxSize(64)
+ table.SetUniqueTogether("Name", "TeamId")
+ table.ColMap("Header").SetMaxSize(1024)
+ table.ColMap("Purpose").SetMaxSize(250)
+ table.ColMap("CreatorId").SetMaxSize(26)
+
+ tablem := db.AddTableWithName(model.ChannelMember{}, "ChannelMembers").SetKeys(false, "ChannelId", "UserId")
+ tablem.ColMap("ChannelId").SetMaxSize(26)
+ tablem.ColMap("UserId").SetMaxSize(26)
+ tablem.ColMap("Roles").SetMaxSize(64)
+ tablem.ColMap("NotifyProps").SetMaxSize(2000)
+ }
+
+ return s
+}
+
+func (s SqlChannelStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_channels_team_id", "Channels", "TeamId")
+ s.CreateIndexIfNotExists("idx_channels_name", "Channels", "Name")
+ s.CreateIndexIfNotExists("idx_channels_update_at", "Channels", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_channels_create_at", "Channels", "CreateAt")
+ s.CreateIndexIfNotExists("idx_channels_delete_at", "Channels", "DeleteAt")
+
+ s.CreateIndexIfNotExists("idx_channelmembers_channel_id", "ChannelMembers", "ChannelId")
+ s.CreateIndexIfNotExists("idx_channelmembers_user_id", "ChannelMembers", "UserId")
+
+ s.CreateFullTextIndexIfNotExists("idx_channels_txt", "Channels", "Name, DisplayName")
+}
+
+func (s SqlChannelStore) Save(channel *model.Channel) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ var result store.StoreResult
+ if channel.Type == model.CHANNEL_DIRECT {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save.direct_channel.app_error", nil, "", http.StatusBadRequest)
+ } else {
+ if transaction, err := s.GetMaster().Begin(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save.open_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result = s.saveChannelT(transaction, channel)
+ if result.Err != nil {
+ transaction.Rollback()
+ } else {
+ if err := transaction.Commit(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save.commit_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) CreateDirectChannel(userId string, otherUserId string) store.StoreChannel {
+ channel := new(model.Channel)
+
+ channel.DisplayName = ""
+ channel.Name = model.GetDMNameFromIds(otherUserId, userId)
+
+ channel.Header = ""
+ channel.Type = model.CHANNEL_DIRECT
+
+ cm1 := &model.ChannelMember{
+ UserId: userId,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ Roles: model.ROLE_CHANNEL_USER.Id,
+ }
+ cm2 := &model.ChannelMember{
+ UserId: otherUserId,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ Roles: model.ROLE_CHANNEL_USER.Id,
+ }
+
+ return s.SaveDirectChannel(channel, cm1, cm2)
+}
+
+func (s SqlChannelStore) SaveDirectChannel(directchannel *model.Channel, member1 *model.ChannelMember, member2 *model.ChannelMember) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ var result store.StoreResult
+
+ if directchannel.Type != model.CHANNEL_DIRECT {
+ result.Err = model.NewAppError("SqlChannelStore.SaveDirectChannel", "store.sql_channel.save_direct_channel.not_direct.app_error", nil, "", http.StatusBadRequest)
+ } else {
+ if transaction, err := s.GetMaster().Begin(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.SaveDirectChannel", "store.sql_channel.save_direct_channel.open_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ directchannel.TeamId = ""
+ channelResult := s.saveChannelT(transaction, directchannel)
+
+ if channelResult.Err != nil {
+ transaction.Rollback()
+ result.Err = channelResult.Err
+ result.Data = channelResult.Data
+ } else {
+ newChannel := channelResult.Data.(*model.Channel)
+ // Members need new channel ID
+ member1.ChannelId = newChannel.Id
+ member2.ChannelId = newChannel.Id
+
+ member1Result := s.saveMemberT(transaction, member1, newChannel)
+ member2Result := s.saveMemberT(transaction, member2, newChannel)
+
+ if member1Result.Err != nil || member2Result.Err != nil {
+ transaction.Rollback()
+ details := ""
+ if member1Result.Err != nil {
+ details += "Member1Err: " + member1Result.Err.Message
+ }
+ if member2Result.Err != nil {
+ details += "Member2Err: " + member2Result.Err.Message
+ }
+ result.Err = model.NewAppError("SqlChannelStore.SaveDirectChannel", "store.sql_channel.save_direct_channel.add_members.app_error", nil, details, http.StatusInternalServerError)
+ } else {
+ if err := transaction.Commit(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.SaveDirectChannel", "store.sql_channel.save_direct_channel.commit.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result = channelResult
+ }
+ }
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) saveChannelT(transaction *gorp.Transaction, channel *model.Channel) store.StoreResult {
+ result := store.StoreResult{}
+
+ if len(channel.Id) > 0 {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save_channel.existing.app_error", nil, "id="+channel.Id, http.StatusBadRequest)
+ return result
+ }
+
+ channel.PreSave()
+ if result.Err = channel.IsValid(); result.Err != nil {
+ return result
+ }
+
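+ // Enforce the per-team channel limit for open and private channels only; direct and group channels do not count toward MaxChannelsPerTeam.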
+ if channel.Type != model.CHANNEL_DIRECT && channel.Type != model.CHANNEL_GROUP {
+ if count, err := transaction.SelectInt("SELECT COUNT(0) FROM Channels WHERE TeamId = :TeamId AND DeleteAt = 0 AND (Type = 'O' OR Type = 'P')", map[string]interface{}{"TeamId": channel.TeamId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save_channel.current_count.app_error", nil, "teamId="+channel.TeamId+", "+err.Error(), http.StatusInternalServerError)
+ return result
+ } else if count > *utils.Cfg.TeamSettings.MaxChannelsPerTeam {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save_channel.limit.app_error", nil, "teamId="+channel.TeamId, http.StatusBadRequest)
+ return result
+ }
+ }
+
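+ // On a unique (Name, TeamId) violation, look up the conflicting channel so callers can tell a previously deleted channel apart from an active duplicate.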
+ if err := transaction.Insert(channel); err != nil {
+ if IsUniqueConstraintError(err, []string{"Name", "channels_name_teamid_key"}) {
+ dupChannel := model.Channel{}
+ s.GetMaster().SelectOne(&dupChannel, "SELECT * FROM Channels WHERE TeamId = :TeamId AND Name = :Name", map[string]interface{}{"TeamId": channel.TeamId, "Name": channel.Name})
+ if dupChannel.DeleteAt > 0 {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save_channel.previously.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.Save", store.CHANNEL_EXISTS_ERROR, nil, "id="+channel.Id+", "+err.Error(), http.StatusBadRequest)
+ result.Data = &dupChannel
+ }
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.Save", "store.sql_channel.save_channel.save.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = channel
+ }
+
+ return result
+}
+
+func (s SqlChannelStore) Update(channel *model.Channel) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ channel.PreUpdate()
+
+ if result.Err = channel.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if count, err := s.GetMaster().Update(channel); err != nil {
+ if IsUniqueConstraintError(err, []string{"Name", "channels_name_teamid_key"}) {
+ dupChannel := model.Channel{}
+ s.GetReplica().SelectOne(&dupChannel, "SELECT * FROM Channels WHERE TeamId = :TeamId AND Name= :Name AND DeleteAt > 0", map[string]interface{}{"TeamId": channel.TeamId, "Name": channel.Name})
+ if dupChannel.DeleteAt > 0 {
+ result.Err = model.NewAppError("SqlChannelStore.Update", "store.sql_channel.update.previously.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.Update", "store.sql_channel.update.exists.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusBadRequest)
+ }
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.Update", "store.sql_channel.update.updating.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else if count != 1 {
+ result.Err = model.NewAppError("SqlChannelStore.Update", "store.sql_channel.update.app_error", nil, "id="+channel.Id, http.StatusInternalServerError)
+ } else {
+ result.Data = channel
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) extraUpdated(channel *model.Channel) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ channel.ExtraUpdated()
+
+ _, err := s.GetMaster().Exec(
+ `UPDATE
+ Channels
+ SET
+ ExtraUpdateAt = :Time
+ WHERE
+ Id = :Id`,
+ map[string]interface{}{"Id": channel.Id, "Time": channel.ExtraUpdateAt})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.extraUpdated", "store.sql_channel.extra_updated.app_error", nil, "id="+channel.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetChannelUnread(channelId, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
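+ // The returned MsgCount is the unread count: the channel's TotalMsgCount minus the member's MsgCount watermark; MentionCount and NotifyProps come from ChannelMembers.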
+ var unreadChannel model.ChannelUnread
+ err := s.GetReplica().SelectOne(&unreadChannel,
+ `SELECT
+ Channels.TeamId TeamId, Channels.Id ChannelId, (Channels.TotalMsgCount - ChannelMembers.MsgCount) MsgCount, ChannelMembers.MentionCount MentionCount, ChannelMembers.NotifyProps NotifyProps
+ FROM
+ Channels, ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND Id = :ChannelId
+ AND UserId = :UserId
+ AND DeleteAt = 0`,
+ map[string]interface{}{"ChannelId": channelId, "UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannelUnread", "store.sql_channel.get_unread.app_error", nil, "channelId="+channelId+" "+err.Error(), http.StatusInternalServerError)
+ if err == sql.ErrNoRows {
+ result.Err.StatusCode = http.StatusNotFound
+ }
+ } else {
+ result.Data = &unreadChannel
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlChannelStore) InvalidateChannel(id string) {
+ channelCache.Remove(id)
+}
+
+func (us SqlChannelStore) InvalidateChannelByName(teamId, name string) {
+ channelByNameCache.Remove(teamId + name)
+}
+
+func (s SqlChannelStore) Get(id string, allowFromCache bool) store.StoreChannel {
+ return s.get(id, false, allowFromCache)
+}
+
+func (s SqlChannelStore) GetPinnedPosts(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ pl := model.NewPostList()
+
+ var posts []*model.Post
+ if _, err := s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE IsPinned = true AND ChannelId = :ChannelId AND DeleteAt = 0 ORDER BY CreateAt ASC", map[string]interface{}{"ChannelId": channelId}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPinnedPosts", "store.sql_channel.pinned_posts.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, post := range posts {
+ pl.AddPost(post)
+ pl.AddOrder(post.Id)
+ }
+ }
+
+ result.Data = pl
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetFromMaster(id string) store.StoreChannel {
+ return s.get(id, true, false)
+}
+
+func (s SqlChannelStore) get(id string, master bool, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var db *gorp.DbMap
+ if master {
+ db = s.GetMaster()
+ } else {
+ db = s.GetReplica()
+ }
+
+ if allowFromCache {
+ if cacheItem, ok := channelCache.Get(id); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Channel")
+ }
+ result.Data = (cacheItem.(*model.Channel)).DeepCopy()
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel")
+ }
+ }
+
+ if obj, err := db.Get(model.Channel{}, id); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.Get", "store.sql_channel.get.find.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlChannelStore.Get", "store.sql_channel.get.existing.app_error", nil, "id="+id, http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.Channel)
+ channelCache.AddWithExpiresInSecs(id, obj.(*model.Channel), CHANNEL_CACHE_SEC)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) Delete(channelId string, time int64) store.StoreChannel {
+ return s.SetDeleteAt(channelId, time, time)
+}
+
+func (s SqlChannelStore) Restore(channelId string, time int64) store.StoreChannel {
+ return s.SetDeleteAt(channelId, 0, time)
+}
+
+func (s SqlChannelStore) SetDeleteAt(channelId string, deleteAt int64, updateAt int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("Update Channels SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :ChannelId", map[string]interface{}{"DeleteAt": deleteAt, "UpdateAt": updateAt, "ChannelId": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.Delete", "store.sql_channel.delete.channel.app_error", nil, "id="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) PermanentDeleteByTeam(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Channels WHERE TeamId = :TeamId", map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.PermanentDeleteByTeam", "store.sql_channel.permanent_delete_by_team.app_error", nil, "teamId="+teamId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) PermanentDelete(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Channels WHERE Id = :ChannelId", map[string]interface{}{"ChannelId": channelId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.PermanentDelete", "store.sql_channel.permanent_delete.app_error", nil, "channel_id="+channelId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) PermanentDeleteMembersByChannel(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM ChannelMembers WHERE ChannelId = :ChannelId", map[string]interface{}{"ChannelId": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveAllMembersByChannel", "store.sql_channel.remove_member.app_error", nil, "channel_id="+channelId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+type channelWithMember struct {
+ model.Channel
+ model.ChannelMember
+}
+
+func (s SqlChannelStore) GetChannels(teamId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ data := &model.ChannelList{}
+ _, err := s.GetReplica().Select(data, "SELECT Channels.* FROM Channels, ChannelMembers WHERE Id = ChannelId AND UserId = :UserId AND DeleteAt = 0 AND (TeamId = :TeamId OR TeamId = '') ORDER BY DisplayName", map[string]interface{}{"TeamId": teamId, "UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannels", "store.sql_channel.get_channels.get.app_error", nil, "teamId="+teamId+", userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ if len(*data) == 0 {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannels", "store.sql_channel.get_channels.not_found.app_error", nil, "teamId="+teamId+", userId="+userId, http.StatusBadRequest)
+ } else {
+ result.Data = data
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetMoreChannels(teamId string, userId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ data := &model.ChannelList{}
+ _, err := s.GetReplica().Select(data,
+ `SELECT
+ *
+ FROM
+ Channels
+ WHERE
+ TeamId = :TeamId1
+ AND Type IN ('O')
+ AND DeleteAt = 0
+ AND Id NOT IN (SELECT
+ Channels.Id
+ FROM
+ Channels,
+ ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND TeamId = :TeamId2
+ AND UserId = :UserId
+ AND DeleteAt = 0)
+ ORDER BY DisplayName
+ LIMIT :Limit
+ OFFSET :Offset`,
+ map[string]interface{}{"TeamId1": teamId, "TeamId2": teamId, "UserId": userId, "Limit": limit, "Offset": offset})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMoreChannels", "store.sql_channel.get_more_channels.get.app_error", nil, "teamId="+teamId+", userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = data
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetPublicChannelsForTeam(teamId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ data := &model.ChannelList{}
+ _, err := s.GetReplica().Select(data,
+ `SELECT
+ *
+ FROM
+ Channels
+ WHERE
+ TeamId = :TeamId
+ AND Type = 'O'
+ AND DeleteAt = 0
+ ORDER BY DisplayName
+ LIMIT :Limit
+ OFFSET :Offset`,
+ map[string]interface{}{"TeamId": teamId, "Limit": limit, "Offset": offset})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetPublicChannelsForTeam", "store.sql_channel.get_public_channels.get.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = data
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetPublicChannelsByIdsForTeam(teamId string, channelIds []string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ props := make(map[string]interface{})
+ props["teamId"] = teamId
+
+ idQuery := ""
+
+ for index, channelId := range channelIds {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["channelId"+strconv.Itoa(index)] = channelId
+ idQuery += ":channelId" + strconv.Itoa(index)
+ }
+
+ data := &model.ChannelList{}
+ _, err := s.GetReplica().Select(data,
+ `SELECT
+ *
+ FROM
+ Channels
+ WHERE
+ TeamId = :teamId
+ AND Type = 'O'
+ AND DeleteAt = 0
+ AND Id IN (`+idQuery+`)
+ ORDER BY DisplayName`,
+ props)
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetPublicChannelsByIdsForTeam", "store.sql_channel.get_channels_by_ids.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ if len(*data) == 0 {
+ result.Err = model.NewAppError("SqlChannelStore.GetPublicChannelsByIdsForTeam", "store.sql_channel.get_channels_by_ids.not_found.app_error", nil, "", http.StatusNotFound)
+ }
+
+ result.Data = data
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+type channelIdWithCountAndUpdateAt struct {
+ Id string
+ TotalMsgCount int64
+ UpdateAt int64
+}
+
+func (s SqlChannelStore) GetChannelCounts(teamId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []channelIdWithCountAndUpdateAt
+ _, err := s.GetReplica().Select(&data, "SELECT Id, TotalMsgCount, UpdateAt FROM Channels WHERE Id IN (SELECT ChannelId FROM ChannelMembers WHERE UserId = :UserId) AND (TeamId = :TeamId OR TeamId = '') AND DeleteAt = 0 ORDER BY DisplayName", map[string]interface{}{"TeamId": teamId, "UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannelCounts", "store.sql_channel.get_channel_counts.get.app_error", nil, "teamId="+teamId+", userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ counts := &model.ChannelCounts{Counts: make(map[string]int64), UpdateTimes: make(map[string]int64)}
+ for i := range data {
+ v := data[i]
+ counts.Counts[v.Id] = v.TotalMsgCount
+ counts.UpdateTimes[v.Id] = v.UpdateAt
+ }
+
+ result.Data = counts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetTeamChannels(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ data := &model.ChannelList{}
+ _, err := s.GetReplica().Select(data, "SELECT * FROM Channels WHERE TeamId = :TeamId And Type != 'D' ORDER BY DisplayName", map[string]interface{}{"TeamId": teamId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannels", "store.sql_channel.get_channels.get.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ if len(*data) == 0 {
+ result.Err = model.NewAppError("SqlChannelStore.GetChannels", "store.sql_channel.get_channels.not_found.app_error", nil, "teamId="+teamId, http.StatusNotFound)
+ } else {
+ result.Data = data
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetByName(teamId string, name string, allowFromCache bool) store.StoreChannel {
+ return s.getByName(teamId, name, false, allowFromCache)
+}
+
+func (s SqlChannelStore) GetByNameIncludeDeleted(teamId string, name string, allowFromCache bool) store.StoreChannel {
+ return s.getByName(teamId, name, true, allowFromCache)
+}
+
+func (s SqlChannelStore) getByName(teamId string, name string, includeDeleted bool, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ var query string
+ if includeDeleted {
+ query = "SELECT * FROM Channels WHERE (TeamId = :TeamId OR TeamId = '') AND Name = :Name"
+ } else {
+ query = "SELECT * FROM Channels WHERE (TeamId = :TeamId OR TeamId = '') AND Name = :Name AND DeleteAt = 0"
+ }
+
+ go func() {
+ result := store.StoreResult{}
+
+ channel := model.Channel{}
+
+ if allowFromCache {
+ if cacheItem, ok := channelByNameCache.Get(teamId + name); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Channel By Name")
+ }
+ result.Data = cacheItem.(*model.Channel)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel By Name")
+ }
+ }
+ }
+ if err := s.GetReplica().SelectOne(&channel, query, map[string]interface{}{"TeamId": teamId, "Name": name}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlChannelStore.GetByName", store.MISSING_CHANNEL_ERROR, nil, "teamId="+teamId+", "+"name="+name+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.GetByName", "store.sql_channel.get_by_name.existing.app_error", nil, "teamId="+teamId+", "+"name="+name+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = &channel
+ channelByNameCache.AddWithExpiresInSecs(teamId+name, &channel, CHANNEL_CACHE_SEC)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetDeletedByName(teamId string, name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ channel := model.Channel{}
+
+ if err := s.GetReplica().SelectOne(&channel, "SELECT * FROM Channels WHERE (TeamId = :TeamId OR TeamId = '') AND Name = :Name AND DeleteAt != 0", map[string]interface{}{"TeamId": teamId, "Name": name}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlChannelStore.GetDeletedByName", "store.sql_channel.get_deleted_by_name.missing.app_error", nil, "teamId="+teamId+", "+"name="+name+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.GetDeletedByName", "store.sql_channel.get_deleted_by_name.existing.app_error", nil, "teamId="+teamId+", "+"name="+name+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = &channel
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetDeleted(teamId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ channels := &model.ChannelList{}
+
+ if _, err := s.GetReplica().Select(channels, "SELECT * FROM Channels WHERE (TeamId = :TeamId OR TeamId = '') AND DeleteAt != 0 ORDER BY DisplayName LIMIT :Limit OFFSET :Offset", map[string]interface{}{"TeamId": teamId, "Limit": limit, "Offset": offset}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlChannelStore.GetDeleted", "store.sql_channel.get_deleted.missing.app_error", nil, "teamId="+teamId+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.GetDeleted", "store.sql_channel.get_deleted.existing.app_error", nil, "teamId="+teamId+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = channels
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) SaveMember(member *model.ChannelMember) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ var result store.StoreResult
+ // Grab the channel we are saving this member to
+ if cr := <-s.GetFromMaster(member.ChannelId); cr.Err != nil {
+ result.Err = cr.Err
+ } else {
+ channel := cr.Data.(*model.Channel)
+
+ if transaction, err := s.GetMaster().Begin(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.SaveMember", "store.sql_channel.save_member.open_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result = s.saveMemberT(transaction, member, channel)
+ if result.Err != nil {
+ transaction.Rollback()
+ } else {
+ if err := transaction.Commit(); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.SaveMember", "store.sql_channel.save_member.commit_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ // If successful, record that members have changed in this channel
+ if mu := <-s.extraUpdated(channel); mu.Err != nil {
+ result.Err = mu.Err
+ }
+ }
+ }
+ }
+
+ s.InvalidateAllChannelMembersForUser(member.UserId)
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) saveMemberT(transaction *gorp.Transaction, member *model.ChannelMember, channel *model.Channel) store.StoreResult {
+ result := store.StoreResult{}
+
+ member.PreSave()
+ if result.Err = member.IsValid(); result.Err != nil {
+ return result
+ }
+
+ if err := transaction.Insert(member); err != nil {
+ if IsUniqueConstraintError(err, []string{"ChannelId", "channelmembers_pkey"}) {
+ result.Err = model.NewAppError("SqlChannelStore.SaveMember", "store.sql_channel.save_member.exists.app_error", nil, "channel_id="+member.ChannelId+", user_id="+member.UserId+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.SaveMember", "store.sql_channel.save_member.save.app_error", nil, "channel_id="+member.ChannelId+", user_id="+member.UserId+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = member
+ }
+
+ return result
+}
+
+func (s SqlChannelStore) UpdateMember(member *model.ChannelMember) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ member.PreUpdate()
+
+ if result.Err = member.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := s.GetMaster().Update(member); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.UpdateMember", "store.sql_channel.update_member.app_error", nil, "channel_id="+member.ChannelId+", "+"user_id="+member.UserId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetMembers(channelId string, offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var members model.ChannelMembers
+ _, err := s.GetReplica().Select(&members, "SELECT * FROM ChannelMembers WHERE ChannelId = :ChannelId LIMIT :Limit OFFSET :Offset", map[string]interface{}{"ChannelId": channelId, "Limit": limit, "Offset": offset})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMembers", "store.sql_channel.get_members.app_error", nil, "channel_id="+channelId+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = &members
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetMember(channelId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var member model.ChannelMember
+
+ if err := s.GetReplica().SelectOne(&member, "SELECT * FROM ChannelMembers WHERE ChannelId = :ChannelId AND UserId = :UserId", map[string]interface{}{"ChannelId": channelId, "UserId": userId}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlChannelStore.GetMember", store.MISSING_CHANNEL_MEMBER_ERROR, nil, "channel_id="+channelId+"user_id="+userId+","+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlChannelStore.GetMember", "store.sql_channel.get_member.app_error", nil, "channel_id="+channelId+"user_id="+userId+","+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = &member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlChannelStore) InvalidateAllChannelMembersForUser(userId string) {
+ allChannelMembersForUserCache.Remove(userId)
+}
+
+func (us SqlChannelStore) IsUserInChannelUseCache(userId string, channelId string) bool {
+ if cacheItem, ok := allChannelMembersForUserCache.Get(userId); ok {
+ if us.metrics != nil {
+ us.metrics.IncrementMemCacheHitCounter("All Channel Members for User")
+ }
+ ids := cacheItem.(map[string]string)
+ if _, ok := ids[channelId]; ok {
+ return true
+ } else {
+ return false
+ }
+ } else {
+ if us.metrics != nil {
+ us.metrics.IncrementMemCacheMissCounter("All Channel Members for User")
+ }
+ }
+
+ if result := <-us.GetAllChannelMembersForUser(userId, true); result.Err != nil {
+ l4g.Error("SqlChannelStore.IsUserInChannelUseCache: " + result.Err.Error())
+ return false
+ } else {
+ ids := result.Data.(map[string]string)
+ if _, ok := ids[channelId]; ok {
+ return true
+ } else {
+ return false
+ }
+ }
+}
+
+func (s SqlChannelStore) GetMemberForPost(postId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ member := &model.ChannelMember{}
+ if err := s.GetReplica().SelectOne(
+ member,
+ `SELECT
+ ChannelMembers.*
+ FROM
+ ChannelMembers,
+ Posts
+ WHERE
+ ChannelMembers.ChannelId = Posts.ChannelId
+ AND ChannelMembers.UserId = :UserId
+ AND Posts.Id = :PostId`, map[string]interface{}{"UserId": userId, "PostId": postId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMemberForPost", "store.sql_channel.get_member_for_post.app_error", nil, "postId="+postId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+type allChannelMember struct {
+ ChannelId string
+ Roles string
+}
+
+func (s SqlChannelStore) GetAllChannelMembersForUser(userId string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := allChannelMembersForUserCache.Get(userId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("All Channel Members for User")
+ }
+ result.Data = cacheItem.(map[string]string)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("All Channel Members for User")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("All Channel Members for User")
+ }
+ }
+
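+ // Build a map of channel id to the member's roles across every non-deleted channel the user belongs to.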
+ var data []allChannelMember
+ _, err := s.GetReplica().Select(&data, "SELECT ChannelId, Roles FROM Channels, ChannelMembers WHERE Channels.Id = ChannelMembers.ChannelId AND ChannelMembers.UserId = :UserId AND Channels.DeleteAt = 0", map[string]interface{}{"UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetAllChannelMembersForUser", "store.sql_channel.get_channels.get.app_error", nil, "userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+
+ ids := make(map[string]string)
+ for i := range data {
+ ids[data[i].ChannelId] = data[i].Roles
+ }
+
+ result.Data = ids
+
+ if allowFromCache {
+ allChannelMembersForUserCache.AddWithExpiresInSecs(userId, ids, ALL_CHANNEL_MEMBERS_FOR_USER_CACHE_SEC)
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlChannelStore) InvalidateCacheForChannelMembersNotifyProps(channelId string) {
+ allChannelMembersNotifyPropsForChannelCache.Remove(channelId)
+}
+
+type allChannelMemberNotifyProps struct {
+ UserId string
+ NotifyProps model.StringMap
+}
+
+func (s SqlChannelStore) GetAllChannelMembersNotifyPropsForChannel(channelId string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := allChannelMembersNotifyPropsForChannelCache.Get(channelId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("All Channel Members Notify Props for Channel")
+ }
+ result.Data = cacheItem.(map[string]model.StringMap)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("All Channel Members Notify Props for Channel")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("All Channel Members Notify Props for Channel")
+ }
+ }
+
+ var data []allChannelMemberNotifyProps
+ _, err := s.GetReplica().Select(&data, `
+ SELECT ChannelMembers.UserId, ChannelMembers.NotifyProps
+ FROM Channels, ChannelMembers
+ WHERE Channels.Id = ChannelMembers.ChannelId AND ChannelMembers.ChannelId = :ChannelId`, map[string]interface{}{"ChannelId": channelId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetAllChannelMembersPropsForChannel", "store.sql_channel.get_members.app_error", nil, "channelId="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+
+ props := make(map[string]model.StringMap)
+ for i := range data {
+ props[data[i].UserId] = data[i].NotifyProps
+ }
+
+ result.Data = props
+
+ allChannelMembersNotifyPropsForChannelCache.AddWithExpiresInSecs(channelId, props, ALL_CHANNEL_MEMBERS_NOTIFY_PROPS_FOR_CHANNEL_CACHE_SEC)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlChannelStore) InvalidateMemberCount(channelId string) {
+ channelMemberCountsCache.Remove(channelId)
+}
+
+func (s SqlChannelStore) GetMemberCountFromCache(channelId string) int64 {
+ if cacheItem, ok := channelMemberCountsCache.Get(channelId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Channel Member Counts")
+ }
+ return cacheItem.(int64)
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel Member Counts")
+ }
+ }
+
+ if result := <-s.GetMemberCount(channelId, true); result.Err != nil {
+ return 0
+ } else {
+ return result.Data.(int64)
+ }
+}
+
+func (s SqlChannelStore) GetMemberCount(channelId string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := channelMemberCountsCache.Get(channelId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Channel Member Counts")
+ }
+ result.Data = cacheItem.(int64)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel Member Counts")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Channel Member Counts")
+ }
+ }
+
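+ // Deactivated users (Users.DeleteAt != 0) are excluded from the member count.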
+ count, err := s.GetReplica().SelectInt(`
+ SELECT
+ count(*)
+ FROM
+ ChannelMembers,
+ Users
+ WHERE
+ ChannelMembers.UserId = Users.Id
+ AND ChannelMembers.ChannelId = :ChannelId
+ AND Users.DeleteAt = 0`, map[string]interface{}{"ChannelId": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMemberCount", "store.sql_channel.get_member_count.app_error", nil, "channel_id="+channelId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+
+ if allowFromCache {
+ channelMemberCountsCache.AddWithExpiresInSecs(channelId, count, CHANNEL_MEMBERS_COUNTS_CACHE_SEC)
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) RemoveMember(channelId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ // Grab the channel we are removing this member from
+ if cr := <-s.Get(channelId, true); cr.Err != nil {
+ result.Err = cr.Err
+ } else {
+ channel := cr.Data.(*model.Channel)
+
+ _, err := s.GetMaster().Exec("DELETE FROM ChannelMembers WHERE ChannelId = :ChannelId AND UserId = :UserId", map[string]interface{}{"ChannelId": channelId, "UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveMember", "store.sql_channel.remove_member.app_error", nil, "channel_id="+channelId+", user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ // If successful, record that members have changed in this channel
+ if mu := <-s.extraUpdated(channel); mu.Err != nil {
+ result.Err = mu.Err
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) PermanentDeleteMembersByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM ChannelMembers WHERE UserId = :UserId", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveMember", "store.sql_channel.permanent_delete_members_by_user.app_error", nil, "user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) UpdateLastViewedAt(channelIds []string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var query string
+ props := make(map[string]interface{})
+
+ idQuery := ""
+ for index, channelId := range channelIds {
+ if len(idQuery) > 0 {
+ idQuery += " OR "
+ }
+
+ props["channelId"+strconv.Itoa(index)] = channelId
+ idQuery += "ChannelId = :channelId" + strconv.Itoa(index)
+ }
+
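+ // Postgres joins via UPDATE ... FROM while MySQL uses a multi-table UPDATE; both reset MentionCount and advance MsgCount, LastViewedAt and LastUpdateAt to the channel's latest post.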
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = `UPDATE
+ ChannelMembers
+ SET
+ MentionCount = 0,
+ MsgCount = Channels.TotalMsgCount,
+ LastViewedAt = Channels.LastPostAt,
+ LastUpdateAt = Channels.LastPostAt
+ FROM
+ Channels
+ WHERE
+ Channels.Id = ChannelMembers.ChannelId
+ AND UserId = :UserId
+ AND (` + idQuery + `)`
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ query = `UPDATE
+ ChannelMembers, Channels
+ SET
+ ChannelMembers.MentionCount = 0,
+ ChannelMembers.MsgCount = Channels.TotalMsgCount,
+ ChannelMembers.LastViewedAt = Channels.LastPostAt,
+ ChannelMembers.LastUpdateAt = Channels.LastPostAt
+ WHERE
+ Channels.Id = ChannelMembers.ChannelId
+ AND UserId = :UserId
+ AND (` + idQuery + `)`
+ }
+
+ props["UserId"] = userId
+
+ _, err := s.GetMaster().Exec(query, props)
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.UpdateLastViewedAt", "store.sql_channel.update_last_viewed_at.app_error", nil, "channel_ids="+strings.Join(channelIds, ",")+", user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) IncrementMentionCount(channelId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec(
+ `UPDATE
+ ChannelMembers
+ SET
+ MentionCount = MentionCount + 1,
+ LastUpdateAt = :LastUpdateAt
+ WHERE
+ UserId = :UserId
+ AND ChannelId = :ChannelId`,
+ map[string]interface{}{"ChannelId": channelId, "UserId": userId, "LastUpdateAt": model.GetMillis()})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.IncrementMentionCount", "store.sql_channel.increment_mention_count.app_error", nil, "channel_id="+channelId+", user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetAll(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.Channel
+ _, err := s.GetReplica().Select(&data, "SELECT * FROM Channels WHERE TeamId = :TeamId AND Type != 'D' ORDER BY Name", map[string]interface{}{"TeamId": teamId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetAll", "store.sql_channel.get_all.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = data
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) GetForPost(postId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ channel := &model.Channel{}
+ if err := s.GetReplica().SelectOne(
+ channel,
+ `SELECT
+ Channels.*
+ FROM
+ Channels,
+ Posts
+ WHERE
+ Channels.Id = Posts.ChannelId
+ AND Posts.Id = :PostId`, map[string]interface{}{"PostId": postId}); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetForPost", "store.sql_channel.get_for_post.app_error", nil, "postId="+postId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = channel
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlChannelStore) AnalyticsTypeCount(teamId string, channelType string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := "SELECT COUNT(Id) AS Value FROM Channels WHERE Type = :ChannelType"
+
+ if len(teamId) > 0 {
+ query += " AND TeamId = :TeamId"
+ }
+
+ v, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId, "ChannelType": channelType})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.AnalyticsTypeCount", "store.sql_channel.analytics_type_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
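+// AnalyticsDeletedTypeCount is the soft-deleted counterpart of AnalyticsTypeCount:
+// it only counts channels of the given type with DeleteAt > 0.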
+func (s SqlChannelStore) AnalyticsDeletedTypeCount(teamId string, channelType string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := "SELECT COUNT(Id) AS Value FROM Channels WHERE Type = :ChannelType AND DeleteAt > 0"
+
+ if len(teamId) > 0 {
+ query += " AND TeamId = :TeamId"
+ }
+
+ v, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId, "ChannelType": channelType})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.AnalyticsDeletedTypeCount", "store.sql_channel.analytics_deleted_type_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
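+// ExtraUpdateByUser stamps ExtraUpdateAt with the given time on every channel the
+// user is a member of, signalling that the channel's member metadata has changed
+// (for example when the user is deactivated).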
+func (s SqlChannelStore) ExtraUpdateByUser(userId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec(
+ `UPDATE Channels SET ExtraUpdateAt = :Time
+ WHERE Id IN (SELECT ChannelId FROM ChannelMembers WHERE UserId = :UserId);`,
+ map[string]interface{}{"UserId": userId, "Time": time})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.extraUpdated", "store.sql_channel.extra_updated.app_error", nil, "user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
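+// GetMembersForUser returns the user's memberships in non-deleted channels that
+// either belong to the given team or have an empty TeamId (e.g. direct channels).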
+func (s SqlChannelStore) GetMembersForUser(teamId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ members := &model.ChannelMembers{}
+ _, err := s.GetReplica().Select(members, `
+ SELECT cm.*
+ FROM ChannelMembers cm
+ INNER JOIN Channels c
+ ON c.Id = cm.ChannelId
+ AND (c.TeamId = :TeamId OR c.TeamId = '')
+ AND c.DeleteAt = 0
+ WHERE cm.UserId = :UserId
+ `, map[string]interface{}{"TeamId": teamId, "UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMembersForUser", "store.sql_channel.get_members.app_error", nil, "teamId="+teamId+", userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = members
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
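+// SearchInTeam searches the team's open, non-deleted channels by name and display
+// name, returning at most 100 results ordered by display name.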
+func (s SqlChannelStore) SearchInTeam(teamId string, term string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := `
+ SELECT
+ *
+ FROM
+ Channels
+ WHERE
+ TeamId = :TeamId
+ AND Type = 'O'
+ AND DeleteAt = 0
+ SEARCH_CLAUSE
+ ORDER BY DisplayName
+ LIMIT 100`
+
+ storeChannel <- s.performSearch(searchQuery, term, map[string]interface{}{"TeamId": teamId})
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
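+// SearchMore behaves like SearchInTeam but excludes channels the given user has
+// already joined, so it only surfaces channels the user could still join.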
+func (s SqlChannelStore) SearchMore(userId string, teamId string, term string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := `
+ SELECT
+ *
+ FROM
+ Channels
+ WHERE
+ TeamId = :TeamId
+ AND Type = 'O'
+ AND DeleteAt = 0
+ AND Id NOT IN (SELECT
+ Channels.Id
+ FROM
+ Channels,
+ ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND TeamId = :TeamId
+ AND UserId = :UserId
+ AND DeleteAt = 0)
+ SEARCH_CLAUSE
+ ORDER BY DisplayName
+ LIMIT 100`
+
+ storeChannel <- s.performSearch(searchQuery, term, map[string]interface{}{"TeamId": teamId, "UserId": userId})
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
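+// performSearch replaces the SEARCH_CLAUSE placeholder in searchQuery with a
+// driver-specific full-text clause (to_tsquery for PostgreSQL, MATCH ... AGAINST
+// for MySQL) built from the sanitized term, then runs the query on a read replica.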
+func (s SqlChannelStore) performSearch(searchQuery string, term string, parameters map[string]interface{}) store.StoreResult {
+ result := store.StoreResult{}
+
+ // these chars have special meaning and can be treated as spaces
+ for _, c := range specialUserSearchChar {
+ term = strings.Replace(term, c, " ", -1)
+ }
+
+ if term == "" {
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "", 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ splitTerm := strings.Fields(term)
+		for i, t := range splitTerm {
+ if i == len(splitTerm)-1 {
+ splitTerm[i] = t + ":*"
+ } else {
+ splitTerm[i] = t + ":* &"
+ }
+ }
+
+ term = strings.Join(splitTerm, " ")
+
+ searchClause := fmt.Sprintf("AND (%s) @@ to_tsquery('simple', :Term)", "Name || ' ' || DisplayName")
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ splitTerm := strings.Fields(term)
+		for i, t := range splitTerm {
+ splitTerm[i] = "+" + t + "*"
+ }
+
+ term = strings.Join(splitTerm, " ")
+
+ searchClause := fmt.Sprintf("AND MATCH(%s) AGAINST (:Term IN BOOLEAN MODE)", "Name, DisplayName")
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+ }
+
+ var channels model.ChannelList
+
+ parameters["Term"] = term
+
+ if _, err := s.GetReplica().Select(&channels, searchQuery, parameters); err != nil {
+		result.Err = model.NewAppError("SqlChannelStore.Search", "store.sql_channel.search.app_error", nil, "term="+term+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = &channels
+ }
+
+ return result
+}
+
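+// GetMembersByIds fetches the ChannelMember rows for the given users in a single
+// query, binding each id as its own named parameter (:userId0, :userId1, ...).
+// Callers consume the result like any other store call, roughly (names are
+// illustrative):
+//
+//	if r := <-ss.Channel().GetMembersByIds(channelId, userIds); r.Err == nil {
+//		members := r.Data.(*model.ChannelMembers)
+//		_ = members
+//	}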
+func (s SqlChannelStore) GetMembersByIds(channelId string, userIds []string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var members model.ChannelMembers
+ props := make(map[string]interface{})
+ idQuery := ""
+
+ for index, userId := range userIds {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["userId"+strconv.Itoa(index)] = userId
+ idQuery += ":userId" + strconv.Itoa(index)
+ }
+
+ props["ChannelId"] = channelId
+
+ if _, err := s.GetReplica().Select(&members, "SELECT * FROM ChannelMembers WHERE ChannelId = :ChannelId AND UserId IN ("+idQuery+")", props); err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.GetMembersByIds", "store.sql_channel.get_members_by_ids.app_error", nil, "channelId="+channelId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+			result.Data = &members
+		}
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/channel_store_test.go b/store/sqlstore/channel_store_test.go
new file mode 100644
index 000000000..490300f24
--- /dev/null
+++ b/store/sqlstore/channel_store_test.go
@@ -0,0 +1,1962 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+func TestChannelStoreSave(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ o1 := model.Channel{}
+ o1.TeamId = teamId
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+
+ if err := (<-ss.Channel().Save(&o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Channel().Save(&o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+
+ o1.Id = ""
+ if err := (<-ss.Channel().Save(&o1)).Err; err == nil {
+ t.Fatal("should be unique name")
+ }
+
+ o1.Id = ""
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_DIRECT
+ if err := (<-ss.Channel().Save(&o1)).Err; err == nil {
+ t.Fatal("Should not be able to save direct channel")
+ }
+}
+
+func TestChannelStoreSaveDirectChannel(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ o1 := model.Channel{}
+ o1.TeamId = teamId
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_DIRECT
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ if err := (<-ss.Channel().SaveDirectChannel(&o1, &m1, &m2)).Err; err != nil {
+ t.Fatal("couldn't save direct channel", err)
+ }
+
+ members := (<-ss.Channel().GetMembers(o1.Id, 0, 100)).Data.(*model.ChannelMembers)
+ if len(*members) != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ if err := (<-ss.Channel().SaveDirectChannel(&o1, &m1, &m2)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+
+ // Attempt to save a direct channel that already exists
+ o1a := model.Channel{
+ TeamId: o1.TeamId,
+ DisplayName: o1.DisplayName,
+ Name: o1.Name,
+ Type: o1.Type,
+ }
+
+ if result := <-ss.Channel().SaveDirectChannel(&o1a, &m1, &m2); result.Err == nil {
+ t.Fatal("should've failed to save a duplicate direct channel")
+ } else if result.Err.Id != store.CHANNEL_EXISTS_ERROR {
+ t.Fatal("should've returned CHANNEL_EXISTS_ERROR")
+ } else if returned := result.Data.(*model.Channel); returned.Id != o1.Id {
+ t.Fatal("should've returned original channel when saving a duplicate direct channel")
+ }
+
+ // Attempt to save a non-direct channel
+ o1.Id = ""
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ if err := (<-ss.Channel().SaveDirectChannel(&o1, &m1, &m2)).Err; err == nil {
+ t.Fatal("Should not be able to save non-direct channel")
+ }
+}
+
+func TestChannelStoreCreateDirectChannel(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ res := <-ss.Channel().CreateDirectChannel(u1.Id, u2.Id)
+ if res.Err != nil {
+ t.Fatal("couldn't create direct channel", res.Err)
+ }
+
+ c1 := res.Data.(*model.Channel)
+
+ members := (<-ss.Channel().GetMembers(c1.Id, 0, 100)).Data.(*model.ChannelMembers)
+ if len(*members) != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+}
+
+func TestChannelStoreUpdate(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = o1.TeamId
+ o2.DisplayName = "Name"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ time.Sleep(100 * time.Millisecond)
+
+ if err := (<-ss.Channel().Update(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ o1.Id = "missing"
+ if err := (<-ss.Channel().Update(&o1)).Err; err == nil {
+ t.Fatal("Update should have failed because of missing key")
+ }
+
+ o1.Id = model.NewId()
+ if err := (<-ss.Channel().Update(&o1)).Err; err == nil {
+		t.Fatal("Update should have failed because of id change")
+ }
+
+ o2.Name = o1.Name
+ if err := (<-ss.Channel().Update(&o2)).Err; err == nil {
+ t.Fatal("Update should have failed because of existing name")
+ }
+}
+
+func TestGetChannelUnread(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+ teamId2 := model.NewId()
+
+ uid := model.NewId()
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: uid}
+ m2 := &model.TeamMember{TeamId: teamId2, UserId: uid}
+ store.Must(ss.Team().SaveMember(m1))
+ store.Must(ss.Team().SaveMember(m2))
+ notifyPropsModel := model.GetDefaultChannelNotifyProps()
+
+ // Setup Channel 1
+ c1 := &model.Channel{TeamId: m1.TeamId, Name: model.NewId(), DisplayName: "Downtown", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c1))
+ cm1 := &model.ChannelMember{ChannelId: c1.Id, UserId: m1.UserId, NotifyProps: notifyPropsModel, MsgCount: 90}
+ store.Must(ss.Channel().SaveMember(cm1))
+
+ // Setup Channel 2
+ c2 := &model.Channel{TeamId: m2.TeamId, Name: model.NewId(), DisplayName: "Cultural", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c2))
+ cm2 := &model.ChannelMember{ChannelId: c2.Id, UserId: m2.UserId, NotifyProps: notifyPropsModel, MsgCount: 90, MentionCount: 5}
+ store.Must(ss.Channel().SaveMember(cm2))
+
+ // Check for Channel 1
+ if resp := <-ss.Channel().GetChannelUnread(c1.Id, uid); resp.Err != nil {
+ t.Fatal(resp.Err)
+ } else {
+ ch := resp.Data.(*model.ChannelUnread)
+ if c1.Id != ch.ChannelId {
+ t.Fatal("wrong channel id")
+ }
+
+ if teamId1 != ch.TeamId {
+ t.Fatal("wrong team id for channel 1")
+ }
+
+ if ch.NotifyProps == nil {
+ t.Fatal("wrong props for channel 1")
+ }
+
+ if ch.MentionCount != 0 {
+ t.Fatal("wrong MentionCount for channel 1")
+ }
+
+ if ch.MsgCount != 10 {
+ t.Fatal("wrong MsgCount for channel 1")
+ }
+ }
+
+ // Check for Channel 2
+ if resp2 := <-ss.Channel().GetChannelUnread(c2.Id, uid); resp2.Err != nil {
+ t.Fatal(resp2.Err)
+ } else {
+ ch2 := resp2.Data.(*model.ChannelUnread)
+ if c2.Id != ch2.ChannelId {
+ t.Fatal("wrong channel id")
+ }
+
+ if teamId2 != ch2.TeamId {
+ t.Fatal("wrong team id")
+ }
+
+ if ch2.MentionCount != 5 {
+ t.Fatal("wrong MentionCount for channel 2")
+ }
+
+ if ch2.MsgCount != 10 {
+ t.Fatal("wrong MsgCount for channel 2")
+ }
+ }
+}
+
+func TestChannelStoreGet(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ if r1 := <-ss.Channel().Get(o1.Id, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Channel).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if err := (<-ss.Channel().Get("", false)).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(&u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Direct Name"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_DIRECT
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o2.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o2.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ store.Must(ss.Channel().SaveDirectChannel(&o2, &m1, &m2))
+
+ if r2 := <-ss.Channel().Get(o2.Id, false); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if r2.Data.(*model.Channel).ToJson() != o2.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if r4 := <-ss.Channel().Get(o2.Id, true); r4.Err != nil {
+ t.Fatal(r4.Err)
+ } else {
+ if r4.Data.(*model.Channel).ToJson() != o2.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if r3 := <-ss.Channel().GetAll(o1.TeamId); r3.Err != nil {
+ t.Fatal(r3.Err)
+ } else {
+ channels := r3.Data.([]*model.Channel)
+ if len(channels) == 0 {
+			t.Fatal("should have returned at least one channel")
+ }
+ }
+
+ if r3 := <-ss.Channel().GetTeamChannels(o1.TeamId); r3.Err != nil {
+ t.Fatal(r3.Err)
+ } else {
+ channels := r3.Data.(*model.ChannelList)
+ if len(*channels) == 0 {
+			t.Fatal("should have returned at least one channel")
+ }
+ }
+}
+
+func TestChannelStoreGetForPost(t *testing.T) {
+ ss := Setup()
+
+ o1 := store.Must(ss.Channel().Save(&model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: "Name",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ })).(*model.Channel)
+
+ p1 := store.Must(ss.Post().Save(&model.Post{
+ UserId: model.NewId(),
+ ChannelId: o1.Id,
+ Message: "test",
+ })).(*model.Post)
+
+ if r1 := <-ss.Channel().GetForPost(p1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else if r1.Data.(*model.Channel).Id != o1.Id {
+ t.Fatal("incorrect channel returned")
+ }
+}
+
+func TestSqlChannelStoreRestore(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ if r := <-ss.Channel().Delete(o1.Id, model.GetMillis()); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+
+ if r := <-ss.Channel().Get(o1.Id, false); r.Data.(*model.Channel).DeleteAt == 0 {
+ t.Fatal("should have been deleted")
+ }
+
+ if r := <-ss.Channel().Restore(o1.Id, model.GetMillis()); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+
+ if r := <-ss.Channel().Get(o1.Id, false); r.Data.(*model.Channel).DeleteAt != 0 {
+ t.Fatal("should have been restored")
+ }
+}
+
+func TestChannelStoreDelete(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = o1.TeamId
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "Channel3"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o3))
+
+ o4 := model.Channel{}
+ o4.TeamId = o1.TeamId
+ o4.DisplayName = "Channel4"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o4))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o2.Id
+ m2.UserId = m1.UserId
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ if r := <-ss.Channel().Delete(o1.Id, model.GetMillis()); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+
+ if r := <-ss.Channel().Get(o1.Id, false); r.Data.(*model.Channel).DeleteAt == 0 {
+ t.Fatal("should have been deleted")
+ }
+
+ if r := <-ss.Channel().Delete(o3.Id, model.GetMillis()); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+
+ cresult := <-ss.Channel().GetChannels(o1.TeamId, m1.UserId)
+ list := cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("invalid number of channels")
+ }
+
+ cresult = <-ss.Channel().GetMoreChannels(o1.TeamId, m1.UserId, 0, 100)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("invalid number of channels")
+ }
+
+ <-ss.Channel().PermanentDelete(o2.Id)
+
+ cresult = <-ss.Channel().GetChannels(o1.TeamId, m1.UserId)
+ t.Log(cresult.Err)
+ if cresult.Err.Id != "store.sql_channel.get_channels.not_found.app_error" {
+ t.Fatal("no channels should be found")
+ }
+
+ if r := <-ss.Channel().PermanentDeleteByTeam(o1.TeamId); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+}
+
+func TestChannelStoreGetByName(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ r1 := <-ss.Channel().GetByName(o1.TeamId, o1.Name, true)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Channel).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if err := (<-ss.Channel().GetByName(o1.TeamId, "", true)).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ if r1 := <-ss.Channel().GetByName(o1.TeamId, o1.Name, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Channel).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if err := (<-ss.Channel().GetByName(o1.TeamId, "", false)).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ store.Must(ss.Channel().Delete(r1.Data.(*model.Channel).Id, model.GetMillis()))
+
+	if err := (<-ss.Channel().GetByName(o1.TeamId, o1.Name, false)).Err; err == nil {
+ t.Fatal("Deleted channel should not be returned by GetByName()")
+ }
+}
+
+func TestChannelStoreGetDeletedByName(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Name"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ o1.DeleteAt = model.GetMillis()
+ store.Must(ss.Channel().Save(&o1))
+
+ if r1 := <-ss.Channel().GetDeletedByName(o1.TeamId, o1.Name); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Channel).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned channel")
+ }
+ }
+
+ if err := (<-ss.Channel().GetDeletedByName(o1.TeamId, "")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestChannelStoreGetDeleted(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ o1.DeleteAt = model.GetMillis()
+ store.Must(ss.Channel().Save(&o1))
+
+ cresult := <-ss.Channel().GetDeleted(o1.TeamId, 0, 100)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list := cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list")
+ }
+
+ if (*list)[0].Name != o1.Name {
+ t.Fatal("missing channel")
+ }
+
+ o2 := model.Channel{}
+ o2.TeamId = o1.TeamId
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ cresult = <-ss.Channel().GetDeleted(o1.TeamId, 0, 100)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list")
+ }
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "Channel3"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_OPEN
+ o3.DeleteAt = model.GetMillis()
+ store.Must(ss.Channel().Save(&o3))
+
+ cresult = <-ss.Channel().GetDeleted(o1.TeamId, 0, 100)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 2 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetDeleted(o1.TeamId, 0, 1)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetDeleted(o1.TeamId, 1, 1)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+}
+
+func TestChannelMemberStore(t *testing.T) {
+ ss := Setup()
+
+ c1 := model.Channel{}
+ c1.TeamId = model.NewId()
+ c1.DisplayName = "NameName"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = *store.Must(ss.Channel().Save(&c1)).(*model.Channel)
+
+ c1t1 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t1 := c1t1.ExtraUpdateAt
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(&u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(&u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ o1 := model.ChannelMember{}
+ o1.ChannelId = c1.Id
+ o1.UserId = u1.Id
+ o1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&o1))
+
+ o2 := model.ChannelMember{}
+ o2.ChannelId = c1.Id
+ o2.UserId = u2.Id
+ o2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&o2))
+
+ c1t2 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t2 := c1t2.ExtraUpdateAt
+
+ if t2 <= t1 {
+ t.Fatal("Member update time incorrect")
+ }
+
+ count := (<-ss.Channel().GetMemberCount(o1.ChannelId, true)).Data.(int64)
+ if count != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ count = (<-ss.Channel().GetMemberCount(o1.ChannelId, true)).Data.(int64)
+ if count != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ if ss.Channel().GetMemberCountFromCache(o1.ChannelId) != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ if ss.Channel().GetMemberCountFromCache("junk") != 0 {
+ t.Fatal("should have saved 0 members")
+ }
+
+ count = (<-ss.Channel().GetMemberCount(o1.ChannelId, false)).Data.(int64)
+ if count != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ store.Must(ss.Channel().RemoveMember(o2.ChannelId, o2.UserId))
+
+ count = (<-ss.Channel().GetMemberCount(o1.ChannelId, false)).Data.(int64)
+ if count != 1 {
+ t.Fatal("should have removed 1 member")
+ }
+
+ c1t3 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t3 := c1t3.ExtraUpdateAt
+
+ if t3 <= t2 || t3 <= t1 {
+ t.Fatal("Member update time incorrect on delete")
+ }
+
+ member := (<-ss.Channel().GetMember(o1.ChannelId, o1.UserId)).Data.(*model.ChannelMember)
+ if member.ChannelId != o1.ChannelId {
+		t.Fatal("should have gotten member")
+ }
+
+ if err := (<-ss.Channel().SaveMember(&o1)).Err; err == nil {
+ t.Fatal("Should have been a duplicate")
+ }
+
+ c1t4 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t4 := c1t4.ExtraUpdateAt
+ if t4 != t3 {
+ t.Fatal("Should not update time upon failure")
+ }
+}
+
+func TestChannelDeleteMemberStore(t *testing.T) {
+ ss := Setup()
+
+ c1 := model.Channel{}
+ c1.TeamId = model.NewId()
+ c1.DisplayName = "NameName"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = *store.Must(ss.Channel().Save(&c1)).(*model.Channel)
+
+ c1t1 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t1 := c1t1.ExtraUpdateAt
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(&u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(&u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ o1 := model.ChannelMember{}
+ o1.ChannelId = c1.Id
+ o1.UserId = u1.Id
+ o1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&o1))
+
+ o2 := model.ChannelMember{}
+ o2.ChannelId = c1.Id
+ o2.UserId = u2.Id
+ o2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&o2))
+
+ c1t2 := (<-ss.Channel().Get(c1.Id, false)).Data.(*model.Channel)
+ t2 := c1t2.ExtraUpdateAt
+
+ if t2 <= t1 {
+ t.Fatal("Member update time incorrect")
+ }
+
+ count := (<-ss.Channel().GetMemberCount(o1.ChannelId, false)).Data.(int64)
+ if count != 2 {
+ t.Fatal("should have saved 2 members")
+ }
+
+ store.Must(ss.Channel().PermanentDeleteMembersByUser(o2.UserId))
+
+ count = (<-ss.Channel().GetMemberCount(o1.ChannelId, false)).Data.(int64)
+ if count != 1 {
+ t.Fatal("should have removed 1 member")
+ }
+
+ if r1 := <-ss.Channel().PermanentDeleteMembersByChannel(o1.ChannelId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ count = (<-ss.Channel().GetMemberCount(o1.ChannelId, false)).Data.(int64)
+ if count != 0 {
+ t.Fatal("should have removed all members")
+ }
+}
+
+func TestChannelStoreGetChannels(t *testing.T) {
+ ss := Setup()
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = o2.Id
+ m3.UserId = model.NewId()
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ cresult := <-ss.Channel().GetChannels(o1.TeamId, m1.UserId)
+ list := cresult.Data.(*model.ChannelList)
+
+ if (*list)[0].Id != o1.Id {
+ t.Fatal("missing channel")
+ }
+
+ acresult := <-ss.Channel().GetAllChannelMembersForUser(m1.UserId, false)
+ ids := acresult.Data.(map[string]string)
+ if _, ok := ids[o1.Id]; !ok {
+ t.Fatal("missing channel")
+ }
+
+ acresult2 := <-ss.Channel().GetAllChannelMembersForUser(m1.UserId, true)
+ ids2 := acresult2.Data.(map[string]string)
+ if _, ok := ids2[o1.Id]; !ok {
+ t.Fatal("missing channel")
+ }
+
+ acresult3 := <-ss.Channel().GetAllChannelMembersForUser(m1.UserId, true)
+ ids3 := acresult3.Data.(map[string]string)
+ if _, ok := ids3[o1.Id]; !ok {
+ t.Fatal("missing channel")
+ }
+
+ if !ss.Channel().IsUserInChannelUseCache(m1.UserId, o1.Id) {
+		t.Fatal("user should be in channel")
+ }
+
+ if ss.Channel().IsUserInChannelUseCache(m1.UserId, o2.Id) {
+		t.Fatal("user should not be in channel")
+ }
+
+ if ss.Channel().IsUserInChannelUseCache(m1.UserId, "blahblah") {
+		t.Fatal("user should not be in a non-existent channel")
+ }
+
+ if ss.Channel().IsUserInChannelUseCache("blahblah", "blahblah") {
+		t.Fatal("non-existent user should not be in any channel")
+ }
+
+ ss.Channel().InvalidateAllChannelMembersForUser(m1.UserId)
+}
+
+func TestChannelStoreGetMoreChannels(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = o2.Id
+ m3.UserId = model.NewId()
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "ChannelA"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o3))
+
+ o4 := model.Channel{}
+ o4.TeamId = o1.TeamId
+ o4.DisplayName = "ChannelB"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o4))
+
+ o5 := model.Channel{}
+ o5.TeamId = o1.TeamId
+ o5.DisplayName = "ChannelC"
+ o5.Name = "zz" + model.NewId() + "b"
+ o5.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o5))
+
+ cresult := <-ss.Channel().GetMoreChannels(o1.TeamId, m1.UserId, 0, 100)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list := cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list")
+ }
+
+ if (*list)[0].Name != o3.Name {
+ t.Fatal("missing channel")
+ }
+
+ o6 := model.Channel{}
+ o6.TeamId = o1.TeamId
+ o6.DisplayName = "ChannelA"
+ o6.Name = "zz" + model.NewId() + "b"
+ o6.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o6))
+
+ cresult = <-ss.Channel().GetMoreChannels(o1.TeamId, m1.UserId, 0, 100)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 2 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetMoreChannels(o1.TeamId, m1.UserId, 0, 1)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetMoreChannels(o1.TeamId, m1.UserId, 1, 1)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+
+ if r1 := <-ss.Channel().AnalyticsTypeCount(o1.TeamId, model.CHANNEL_OPEN); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) != 3 {
+ t.Log(r1.Data)
+ t.Fatal("wrong value")
+ }
+ }
+
+ if r1 := <-ss.Channel().AnalyticsTypeCount(o1.TeamId, model.CHANNEL_PRIVATE); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) != 2 {
+ t.Log(r1.Data)
+ t.Fatal("wrong value")
+ }
+ }
+}
+
+func TestChannelStoreGetPublicChannelsForTeam(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "OpenChannel1Team1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "OpenChannel1Team2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "PrivateChannel1Team1"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o3))
+
+ cresult := <-ss.Channel().GetPublicChannelsForTeam(o1.TeamId, 0, 100)
+ if cresult.Err != nil {
+ t.Fatal(cresult.Err)
+ }
+ list := cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list")
+ }
+
+ if (*list)[0].Name != o1.Name {
+ t.Fatal("missing channel")
+ }
+
+ o4 := model.Channel{}
+ o4.TeamId = o1.TeamId
+ o4.DisplayName = "OpenChannel2Team1"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o4))
+
+ cresult = <-ss.Channel().GetPublicChannelsForTeam(o1.TeamId, 0, 100)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 2 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetPublicChannelsForTeam(o1.TeamId, 0, 1)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+
+ cresult = <-ss.Channel().GetPublicChannelsForTeam(o1.TeamId, 1, 1)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("wrong list length")
+ }
+
+ if r1 := <-ss.Channel().AnalyticsTypeCount(o1.TeamId, model.CHANNEL_OPEN); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) != 2 {
+ t.Log(r1.Data)
+ t.Fatal("wrong value")
+ }
+ }
+
+ if r1 := <-ss.Channel().AnalyticsTypeCount(o1.TeamId, model.CHANNEL_PRIVATE); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) != 1 {
+ t.Log(r1.Data)
+ t.Fatal("wrong value")
+ }
+ }
+}
+
+func TestChannelStoreGetPublicChannelsByIdsForTeam(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+
+ oc1 := model.Channel{}
+ oc1.TeamId = teamId1
+ oc1.DisplayName = "OpenChannel1Team1"
+ oc1.Name = "zz" + model.NewId() + "b"
+ oc1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&oc1))
+
+ oc2 := model.Channel{}
+ oc2.TeamId = model.NewId()
+ oc2.DisplayName = "OpenChannel2TeamOther"
+ oc2.Name = "zz" + model.NewId() + "b"
+ oc2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&oc2))
+
+ pc3 := model.Channel{}
+ pc3.TeamId = teamId1
+ pc3.DisplayName = "PrivateChannel3Team1"
+ pc3.Name = "zz" + model.NewId() + "b"
+ pc3.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&pc3))
+
+ cids := []string{oc1.Id}
+ cresult := <-ss.Channel().GetPublicChannelsByIdsForTeam(teamId1, cids)
+ list := cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("should return 1 channel")
+ }
+
+ if (*list)[0].Id != oc1.Id {
+ t.Fatal("missing channel")
+ }
+
+ cids = append(cids, oc2.Id)
+ cids = append(cids, model.NewId())
+ cids = append(cids, pc3.Id)
+ cresult = <-ss.Channel().GetPublicChannelsByIdsForTeam(teamId1, cids)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 1 {
+ t.Fatal("should return 1 channel")
+ }
+
+ oc4 := model.Channel{}
+ oc4.TeamId = teamId1
+ oc4.DisplayName = "OpenChannel4Team1"
+ oc4.Name = "zz" + model.NewId() + "b"
+ oc4.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&oc4))
+
+ cids = append(cids, oc4.Id)
+ cresult = <-ss.Channel().GetPublicChannelsByIdsForTeam(teamId1, cids)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 2 {
+ t.Fatal("should return 2 channels")
+ }
+
+ if (*list)[0].Id != oc1.Id {
+ t.Fatal("missing channel")
+ }
+
+ if (*list)[1].Id != oc4.Id {
+ t.Fatal("missing channel")
+ }
+
+ cids = cids[:0]
+ cids = append(cids, model.NewId())
+ cresult = <-ss.Channel().GetPublicChannelsByIdsForTeam(teamId1, cids)
+ list = cresult.Data.(*model.ChannelList)
+
+ if len(*list) != 0 {
+ t.Fatal("should not return a channel")
+ }
+}
+
+func TestChannelStoreGetChannelCounts(t *testing.T) {
+ ss := Setup()
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = o2.Id
+ m3.UserId = model.NewId()
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ cresult := <-ss.Channel().GetChannelCounts(o1.TeamId, m1.UserId)
+ counts := cresult.Data.(*model.ChannelCounts)
+
+ if len(counts.Counts) != 1 {
+ t.Fatal("wrong number of counts")
+ }
+
+ if len(counts.UpdateTimes) != 1 {
+ t.Fatal("wrong number of update times")
+ }
+}
+
+func TestChannelStoreGetMembersForUser(t *testing.T) {
+ ss := Setup()
+
+ t1 := model.Team{}
+ t1.DisplayName = "Name"
+ t1.Name = model.NewId()
+ t1.Email = model.NewId() + "@nowhere.com"
+ t1.Type = model.TEAM_OPEN
+ store.Must(ss.Team().Save(&t1))
+
+ o1 := model.Channel{}
+ o1.TeamId = t1.Id
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = o1.TeamId
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o2.Id
+ m2.UserId = m1.UserId
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ cresult := <-ss.Channel().GetMembersForUser(o1.TeamId, m1.UserId)
+ members := cresult.Data.(*model.ChannelMembers)
+
+	// both channel memberships should be returned
+ if len(*members) != 2 {
+ t.Fatal("wrong number of members")
+ }
+}
+
+func TestChannelStoreUpdateLastViewedAt(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ o1.TotalMsgCount = 25
+ store.Must(ss.Channel().Save(&o1))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ err := (<-ss.Channel().UpdateLastViewedAt([]string{m1.ChannelId}, m1.UserId)).Err
+ if err != nil {
+ t.Fatal("failed to update", err)
+ }
+
+ err = (<-ss.Channel().UpdateLastViewedAt([]string{m1.ChannelId}, "missing id")).Err
+ if err != nil {
+ t.Fatal("failed to update")
+ }
+}
+
+func TestChannelStoreIncrementMentionCount(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "Channel1"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ o1.TotalMsgCount = 25
+ store.Must(ss.Channel().Save(&o1))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ err := (<-ss.Channel().IncrementMentionCount(m1.ChannelId, m1.UserId)).Err
+ if err != nil {
+ t.Fatal("failed to update")
+ }
+
+ err = (<-ss.Channel().IncrementMentionCount(m1.ChannelId, "missing id")).Err
+ if err != nil {
+ t.Fatal("failed to update")
+ }
+
+ err = (<-ss.Channel().IncrementMentionCount("missing id", m1.UserId)).Err
+ if err != nil {
+ t.Fatal("failed to update")
+ }
+
+ err = (<-ss.Channel().IncrementMentionCount("missing id", "missing id")).Err
+ if err != nil {
+ t.Fatal("failed to update")
+ }
+}
+
+func TestUpdateChannelMember(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+
+ c1 := &model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: model.NewId(),
+ Name: model.NewId(),
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(c1))
+
+ m1 := &model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: userId,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(m1))
+
+ m1.NotifyProps["test"] = "sometext"
+ if result := <-ss.Channel().UpdateMember(m1); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ m1.UserId = ""
+ if result := <-ss.Channel().UpdateMember(m1); result.Err == nil {
+ t.Fatal("bad user id - should fail")
+ }
+}
+
+func TestGetMember(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+
+ c1 := &model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: model.NewId(),
+ Name: model.NewId(),
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(c1))
+
+ c2 := &model.Channel{
+ TeamId: c1.TeamId,
+ DisplayName: model.NewId(),
+ Name: model.NewId(),
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(c2))
+
+ m1 := &model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: userId,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(m1))
+
+ m2 := &model.ChannelMember{
+ ChannelId: c2.Id,
+ UserId: userId,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(m2))
+
+ if result := <-ss.Channel().GetMember(model.NewId(), userId); result.Err == nil {
+		t.Fatal("should've failed to get member for non-existent channel")
+ }
+
+ if result := <-ss.Channel().GetMember(c1.Id, model.NewId()); result.Err == nil {
+		t.Fatal("should've failed to get member for non-existent user")
+ }
+
+ if result := <-ss.Channel().GetMember(c1.Id, userId); result.Err != nil {
+ t.Fatal("shouldn't have errored when getting member", result.Err)
+ } else if member := result.Data.(*model.ChannelMember); member.ChannelId != c1.Id {
+ t.Fatal("should've gotten member of channel 1")
+ } else if member.UserId != userId {
+ t.Fatal("should've gotten member for user")
+ }
+
+ if result := <-ss.Channel().GetMember(c2.Id, userId); result.Err != nil {
+ t.Fatal("shouldn't have errored when getting member", result.Err)
+ } else if member := result.Data.(*model.ChannelMember); member.ChannelId != c2.Id {
+ t.Fatal("should've gotten member of channel 2")
+ } else if member.UserId != userId {
+ t.Fatal("should've gotten member for user")
+ }
+
+ if result := <-ss.Channel().GetAllChannelMembersNotifyPropsForChannel(c2.Id, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ props := result.Data.(map[string]model.StringMap)
+ if len(props) == 0 {
+ t.Fatal("should not be empty")
+ }
+ }
+
+ if result := <-ss.Channel().GetAllChannelMembersNotifyPropsForChannel(c2.Id, true); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ props := result.Data.(map[string]model.StringMap)
+ if len(props) == 0 {
+ t.Fatal("should not be empty")
+ }
+ }
+
+ ss.Channel().InvalidateCacheForChannelMembersNotifyProps(c2.Id)
+}
+
+func TestChannelStoreGetMemberForPost(t *testing.T) {
+ ss := Setup()
+
+ o1 := store.Must(ss.Channel().Save(&model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: "Name",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ })).(*model.Channel)
+
+ m1 := store.Must(ss.Channel().SaveMember(&model.ChannelMember{
+ ChannelId: o1.Id,
+ UserId: model.NewId(),
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ })).(*model.ChannelMember)
+
+ p1 := store.Must(ss.Post().Save(&model.Post{
+ UserId: model.NewId(),
+ ChannelId: o1.Id,
+ Message: "test",
+ })).(*model.Post)
+
+ if r1 := <-ss.Channel().GetMemberForPost(p1.Id, m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else if r1.Data.(*model.ChannelMember).ToJson() != m1.ToJson() {
+ t.Fatal("invalid returned channel member")
+ }
+
+ if r2 := <-ss.Channel().GetMemberForPost(p1.Id, model.NewId()); r2.Err == nil {
+ t.Fatal("shouldn't have returned a member")
+ }
+}
+
+func TestGetMemberCount(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ c1 := model.Channel{
+ TeamId: teamId,
+ DisplayName: "Channel1",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(&c1))
+
+ c2 := model.Channel{
+ TeamId: teamId,
+ DisplayName: "Channel2",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(&c2))
+
+ u1 := &model.User{
+ Email: model.NewId(),
+ DeleteAt: 0,
+ }
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ m1 := model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: u1.Id,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ if result := <-ss.Channel().GetMemberCount(c1.Id, false); result.Err != nil {
+ t.Fatalf("failed to get member count: %v", result.Err)
+ } else if result.Data.(int64) != 1 {
+ t.Fatalf("got incorrect member count %v", result.Data)
+ }
+
+ u2 := model.User{
+ Email: model.NewId(),
+ DeleteAt: 0,
+ }
+ store.Must(ss.User().Save(&u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ m2 := model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: u2.Id,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ if result := <-ss.Channel().GetMemberCount(c1.Id, false); result.Err != nil {
+ t.Fatalf("failed to get member count: %v", result.Err)
+ } else if result.Data.(int64) != 2 {
+ t.Fatalf("got incorrect member count %v", result.Data)
+ }
+
+ // make sure members of other channels aren't counted
+ u3 := model.User{
+ Email: model.NewId(),
+ DeleteAt: 0,
+ }
+ store.Must(ss.User().Save(&u3))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u3.Id}))
+
+ m3 := model.ChannelMember{
+ ChannelId: c2.Id,
+ UserId: u3.Id,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ if result := <-ss.Channel().GetMemberCount(c1.Id, false); result.Err != nil {
+ t.Fatalf("failed to get member count: %v", result.Err)
+ } else if result.Data.(int64) != 2 {
+ t.Fatalf("got incorrect member count %v", result.Data)
+ }
+
+ // make sure inactive users aren't counted
+ u4 := &model.User{
+ Email: model.NewId(),
+ DeleteAt: 10000,
+ }
+ store.Must(ss.User().Save(u4))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u4.Id}))
+
+ m4 := model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: u4.Id,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(&m4))
+
+ if result := <-ss.Channel().GetMemberCount(c1.Id, false); result.Err != nil {
+ t.Fatalf("failed to get member count: %v", result.Err)
+ } else if result.Data.(int64) != 2 {
+ t.Fatalf("got incorrect member count %v", result.Data)
+ }
+}
+
+func TestUpdateExtrasByUser(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ c1 := model.Channel{
+ TeamId: teamId,
+ DisplayName: "Channel1",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(&c1))
+
+ c2 := model.Channel{
+ TeamId: teamId,
+ DisplayName: "Channel2",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ }
+ store.Must(ss.Channel().Save(&c2))
+
+ u1 := &model.User{
+ Email: model.NewId(),
+ DeleteAt: 0,
+ }
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ m1 := model.ChannelMember{
+ ChannelId: c1.Id,
+ UserId: u1.Id,
+ NotifyProps: model.GetDefaultChannelNotifyProps(),
+ }
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ u1.DeleteAt = model.GetMillis()
+ store.Must(ss.User().Update(u1, true))
+
+ if result := <-ss.Channel().ExtraUpdateByUser(u1.Id, u1.DeleteAt); result.Err != nil {
+ t.Fatalf("failed to update extras by user: %v", result.Err)
+ }
+
+ u1.DeleteAt = 0
+ store.Must(ss.User().Update(u1, true))
+
+ if result := <-ss.Channel().ExtraUpdateByUser(u1.Id, u1.DeleteAt); result.Err != nil {
+ t.Fatalf("failed to update extras by user: %v", result.Err)
+ }
+}
+
+func TestChannelStoreSearchMore(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "ChannelA"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = o2.Id
+ m3.UserId = model.NewId()
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "ChannelA"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o3))
+
+ o4 := model.Channel{}
+ o4.TeamId = o1.TeamId
+ o4.DisplayName = "ChannelB"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o4))
+
+ o5 := model.Channel{}
+ o5.TeamId = o1.TeamId
+ o5.DisplayName = "ChannelC"
+ o5.Name = "zz" + model.NewId() + "b"
+ o5.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o5))
+
+ if result := <-ss.Channel().SearchMore(m1.UserId, o1.TeamId, "ChannelA"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) == 0 {
+ t.Fatal("should not be empty")
+ }
+
+ if (*channels)[0].Name != o3.Name {
+ t.Fatal("wrong channel returned")
+ }
+ }
+
+ if result := <-ss.Channel().SearchMore(m1.UserId, o1.TeamId, o4.Name); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) != 0 {
+ t.Fatal("should be empty")
+ }
+ }
+
+ if result := <-ss.Channel().SearchMore(m1.UserId, o1.TeamId, o3.Name); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) == 0 {
+ t.Fatal("should not be empty")
+ }
+
+ if (*channels)[0].Name != o3.Name {
+ t.Fatal("wrong channel returned")
+ }
+ }
+}
+
+func TestChannelStoreSearchInTeam(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "ChannelA"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = o1.Id
+ m1.UserId = model.NewId()
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = o1.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = o2.Id
+ m3.UserId = model.NewId()
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ o3 := model.Channel{}
+ o3.TeamId = o1.TeamId
+ o3.DisplayName = "ChannelA"
+ o3.Name = "zz" + model.NewId() + "b"
+ o3.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o3))
+
+ o4 := model.Channel{}
+ o4.TeamId = o1.TeamId
+ o4.DisplayName = "ChannelB"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o4))
+
+ o5 := model.Channel{}
+ o5.TeamId = o1.TeamId
+ o5.DisplayName = "ChannelC"
+ o5.Name = "zz" + model.NewId() + "b"
+ o5.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&o5))
+
+ if result := <-ss.Channel().SearchInTeam(o1.TeamId, "ChannelA"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) != 2 {
+ t.Fatal("wrong length")
+ }
+ }
+
+ if result := <-ss.Channel().SearchInTeam(o1.TeamId, ""); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) == 0 {
+ t.Fatal("should not be empty")
+ }
+ }
+
+ if result := <-ss.Channel().SearchInTeam(o1.TeamId, "blargh"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ channels := result.Data.(*model.ChannelList)
+ if len(*channels) != 0 {
+ t.Fatal("should be empty")
+ }
+ }
+}
+
+func TestChannelStoreGetMembersByIds(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "ChannelA"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ m1 := &model.ChannelMember{ChannelId: o1.Id, UserId: model.NewId(), NotifyProps: model.GetDefaultChannelNotifyProps()}
+ store.Must(ss.Channel().SaveMember(m1))
+
+ if r := <-ss.Channel().GetMembersByIds(m1.ChannelId, []string{m1.UserId}); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ rm1 := (*r.Data.(*model.ChannelMembers))[0]
+
+ if rm1.ChannelId != m1.ChannelId {
+ t.Fatal("bad team id")
+ }
+
+ if rm1.UserId != m1.UserId {
+ t.Fatal("bad user id")
+ }
+ }
+
+ m2 := &model.ChannelMember{ChannelId: o1.Id, UserId: model.NewId(), NotifyProps: model.GetDefaultChannelNotifyProps()}
+ store.Must(ss.Channel().SaveMember(m2))
+
+ if r := <-ss.Channel().GetMembersByIds(m1.ChannelId, []string{m1.UserId, m2.UserId, model.NewId()}); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ rm := (*r.Data.(*model.ChannelMembers))
+
+ if len(rm) != 2 {
+			t.Fatal("returned wrong number of results")
+ }
+ }
+
+ if r := <-ss.Channel().GetMembersByIds(m1.ChannelId, []string{}); r.Err == nil {
+ t.Fatal("empty user ids - should have failed")
+ }
+}
+
+func TestChannelStoreAnalyticsDeletedTypeCount(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Channel{}
+ o1.TeamId = model.NewId()
+ o1.DisplayName = "ChannelA"
+ o1.Name = "zz" + model.NewId() + "b"
+ o1.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o1))
+
+ o2 := model.Channel{}
+ o2.TeamId = model.NewId()
+ o2.DisplayName = "Channel2"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Type = model.CHANNEL_OPEN
+ store.Must(ss.Channel().Save(&o2))
+
+ p3 := model.Channel{}
+ p3.TeamId = model.NewId()
+ p3.DisplayName = "Channel3"
+ p3.Name = "zz" + model.NewId() + "b"
+ p3.Type = model.CHANNEL_PRIVATE
+ store.Must(ss.Channel().Save(&p3))
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Nickname = model.NewId()
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Nickname = model.NewId()
+ store.Must(ss.User().Save(u2))
+
+ var d4 *model.Channel
+ if result := <-ss.Channel().CreateDirectChannel(u1.Id, u2.Id); result.Err != nil {
+		t.Fatal(result.Err.Error())
+ } else {
+ d4 = result.Data.(*model.Channel)
+ }
+
+ var openStartCount int64
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "O"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ openStartCount = result.Data.(int64)
+ }
+
+ var privateStartCount int64
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "P"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ privateStartCount = result.Data.(int64)
+ }
+
+ var directStartCount int64
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "D"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ directStartCount = result.Data.(int64)
+ }
+
+ store.Must(ss.Channel().Delete(o1.Id, model.GetMillis()))
+ store.Must(ss.Channel().Delete(o2.Id, model.GetMillis()))
+ store.Must(ss.Channel().Delete(p3.Id, model.GetMillis()))
+ store.Must(ss.Channel().Delete(d4.Id, model.GetMillis()))
+
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "O"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ if result.Data.(int64) != openStartCount+2 {
+ t.Fatalf("Wrong open channel deleted count.")
+ }
+ }
+
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "P"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ if result.Data.(int64) != privateStartCount+1 {
+ t.Fatalf("Wrong private channel deleted count.")
+ }
+ }
+
+ if result := <-ss.Channel().AnalyticsDeletedTypeCount("", "D"); result.Err != nil {
+ t.Fatal(result.Err.Error())
+ } else {
+ if result.Data.(int64) != directStartCount+1 {
+ t.Fatalf("Wrong direct channel deleted count.")
+ }
+ }
+}
+
+func TestChannelStoreGetPinnedPosts(t *testing.T) {
+ ss := Setup()
+
+ o1 := store.Must(ss.Channel().Save(&model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: "Name",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ })).(*model.Channel)
+
+ p1 := store.Must(ss.Post().Save(&model.Post{
+ UserId: model.NewId(),
+ ChannelId: o1.Id,
+ Message: "test",
+ IsPinned: true,
+ })).(*model.Post)
+
+ if r1 := <-ss.Channel().GetPinnedPosts(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else if r1.Data.(*model.PostList).Posts[p1.Id] == nil {
+ t.Fatal("didn't return relevant pinned posts")
+ }
+
+ o2 := store.Must(ss.Channel().Save(&model.Channel{
+ TeamId: model.NewId(),
+ DisplayName: "Name",
+ Name: "zz" + model.NewId() + "b",
+ Type: model.CHANNEL_OPEN,
+ })).(*model.Channel)
+
+ store.Must(ss.Post().Save(&model.Post{
+ UserId: model.NewId(),
+ ChannelId: o2.Id,
+ Message: "test",
+ }))
+
+ if r2 := <-ss.Channel().GetPinnedPosts(o2.Id); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else if len(r2.Data.(*model.PostList).Posts) != 0 {
+ t.Fatal("wasn't supposed to return posts")
+ }
+}
diff --git a/store/sqlstore/cluster_discovery_store.go b/store/sqlstore/cluster_discovery_store.go
new file mode 100644
index 000000000..4c7d2706b
--- /dev/null
+++ b/store/sqlstore/cluster_discovery_store.go
@@ -0,0 +1,229 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type sqlClusterDiscoveryStore struct {
+ SqlStore
+}
+
+func NewSqlClusterDiscoveryStore(sqlStore SqlStore) store.ClusterDiscoveryStore {
+ s := &sqlClusterDiscoveryStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.ClusterDiscovery{}, "ClusterDiscovery").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Type").SetMaxSize(64)
+ table.ColMap("ClusterName").SetMaxSize(64)
+ table.ColMap("Hostname").SetMaxSize(512)
+ }
+
+ return s
+}
+
+func (s sqlClusterDiscoveryStore) Save(ClusterDiscovery *model.ClusterDiscovery) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ ClusterDiscovery.PreSave()
+ if result.Err = ClusterDiscovery.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(ClusterDiscovery); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.Save", "Failed to save ClusterDiscovery row", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s sqlClusterDiscoveryStore) Delete(ClusterDiscovery *model.ClusterDiscovery) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ result.Data = false
+
+ if count, err := s.GetMaster().SelectInt(
+ `
+ DELETE
+ FROM
+ ClusterDiscovery
+ WHERE
+ Type = :Type
+ AND ClusterName = :ClusterName
+ AND Hostname = :Hostname
+ `,
+ map[string]interface{}{
+ "Type": ClusterDiscovery.Type,
+ "ClusterName": ClusterDiscovery.ClusterName,
+ "Hostname": ClusterDiscovery.Hostname,
+ },
+ ); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.Delete", "Failed to delete", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ if count > 0 {
+ result.Data = true
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s sqlClusterDiscoveryStore) Exists(ClusterDiscovery *model.ClusterDiscovery) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ result.Data = false
+
+ if count, err := s.GetMaster().SelectInt(
+ `
+ SELECT
+ COUNT(*)
+ FROM
+ ClusterDiscovery
+ WHERE
+ Type = :Type
+ AND ClusterName = :ClusterName
+ AND Hostname = :Hostname
+ `,
+ map[string]interface{}{
+ "Type": ClusterDiscovery.Type,
+ "ClusterName": ClusterDiscovery.ClusterName,
+ "Hostname": ClusterDiscovery.Hostname,
+ },
+ ); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.Exists", "Failed to check if it exists", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ if count > 0 {
+ result.Data = true
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
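+// GetAll returns the discovery rows matching the given type and cluster name whose LastPingAt is
+// more recent than CDS_OFFLINE_AFTER_MILLIS ago.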
+func (s sqlClusterDiscoveryStore) GetAll(ClusterDiscoveryType, clusterName string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ lastPingAt := model.GetMillis() - model.CDS_OFFLINE_AFTER_MILLIS
+
+ var list []*model.ClusterDiscovery
+ if _, err := s.GetMaster().Select(
+ &list,
+ `
+ SELECT
+ *
+ FROM
+ ClusterDiscovery
+ WHERE
+ Type = :ClusterDiscoveryType
+ AND ClusterName = :ClusterName
+ AND LastPingAt > :LastPingAt
+ `,
+ map[string]interface{}{
+ "ClusterDiscoveryType": ClusterDiscoveryType,
+ "ClusterName": clusterName,
+ "LastPingAt": lastPingAt,
+ },
+ ); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.GetAllForType", "Failed to get all disoery rows", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = list
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s sqlClusterDiscoveryStore) SetLastPingAt(ClusterDiscovery *model.ClusterDiscovery) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `
+ UPDATE ClusterDiscovery
+ SET
+ LastPingAt = :LastPingAt
+ WHERE
+ Type = :Type
+ AND ClusterName = :ClusterName
+ AND Hostname = :Hostname
+ `,
+ map[string]interface{}{
+ "LastPingAt": model.GetMillis(),
+ "Type": ClusterDiscovery.Type,
+ "ClusterName": ClusterDiscovery.ClusterName,
+ "Hostname": ClusterDiscovery.Hostname,
+ },
+ ); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.GetAllForType", "Failed to update last ping at", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
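+// Cleanup deletes discovery rows whose LastPingAt is older than CDS_OFFLINE_AFTER_MILLIS.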
+func (s sqlClusterDiscoveryStore) Cleanup() store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `
+ DELETE FROM ClusterDiscovery
+ WHERE
+ LastPingAt < :LastPingAt
+ `,
+ map[string]interface{}{
+ "LastPingAt": model.GetMillis() - model.CDS_OFFLINE_AFTER_MILLIS,
+ },
+ ); err != nil {
+ result.Err = model.NewAppError("SqlClusterDiscoveryStore.Save", "Failed to save ClusterDiscovery row", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/cluster_discovery_store_test.go b/store/sqlstore/cluster_discovery_store_test.go
new file mode 100644
index 000000000..35207cd56
--- /dev/null
+++ b/store/sqlstore/cluster_discovery_store_test.go
@@ -0,0 +1,202 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestSqlClusterDiscoveryStore(t *testing.T) {
+ ss := Setup()
+
+ discovery := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname" + model.NewId(),
+ Type: "test_test",
+ }
+
+ if result := <-ss.ClusterDiscovery().Save(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.ClusterDiscovery().Cleanup(); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestSqlClusterDiscoveryStoreDelete(t *testing.T) {
+ ss := Setup()
+
+ discovery := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname" + model.NewId(),
+ Type: "test_test",
+ }
+
+ if result := <-ss.ClusterDiscovery().Save(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.ClusterDiscovery().Delete(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestSqlClusterDiscoveryStoreLastPing(t *testing.T) {
+ ss := Setup()
+
+ discovery := &model.ClusterDiscovery{
+ ClusterName: "cluster_name_lastPing",
+ Hostname: "hostname" + model.NewId(),
+ Type: "test_test_lastPing" + model.NewId(),
+ }
+
+ if result := <-ss.ClusterDiscovery().Save(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.ClusterDiscovery().SetLastPingAt(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ttime := model.GetMillis()
+
+ time.Sleep(1 * time.Second)
+
+ if result := <-ss.ClusterDiscovery().SetLastPingAt(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.ClusterDiscovery().GetAll(discovery.Type, "cluster_name_lastPing"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ list := result.Data.([]*model.ClusterDiscovery)
+
+ if len(list) != 1 {
+ t.Fatal("should only be 1 items")
+ return
+ }
+
+ if list[0].LastPingAt-ttime < 500 {
+ t.Fatal("failed to set time")
+ }
+ }
+
+ discovery2 := &model.ClusterDiscovery{
+ ClusterName: "cluster_name_missing",
+ Hostname: "hostname" + model.NewId(),
+ Type: "test_test_missing",
+ }
+
+ if result := <-ss.ClusterDiscovery().SetLastPingAt(discovery2); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestSqlClusterDiscoveryStoreExists(t *testing.T) {
+ ss := Setup()
+
+ discovery := &model.ClusterDiscovery{
+ ClusterName: "cluster_name_Exists",
+ Hostname: "hostname" + model.NewId(),
+ Type: "test_test_Exists" + model.NewId(),
+ }
+
+ if result := <-ss.ClusterDiscovery().Save(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.ClusterDiscovery().Exists(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ val := result.Data.(bool)
+ if !val {
+ t.Fatal("should be true")
+ }
+ }
+
+ discovery.ClusterName = "cluster_name_Exists2"
+
+ if result := <-ss.ClusterDiscovery().Exists(discovery); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ val := result.Data.(bool)
+ if val {
+ t.Fatal("should be true")
+ }
+ }
+}
+
+func TestSqlClusterDiscoveryGetStore(t *testing.T) {
+ ss := Setup()
+
+ testType1 := model.NewId()
+
+ discovery1 := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname1",
+ Type: testType1,
+ }
+ store.Must(ss.ClusterDiscovery().Save(discovery1))
+
+ discovery2 := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname2",
+ Type: testType1,
+ }
+ store.Must(ss.ClusterDiscovery().Save(discovery2))
+
+ discovery3 := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname3",
+ Type: testType1,
+ CreateAt: 1,
+ LastPingAt: 1,
+ }
+ store.Must(ss.ClusterDiscovery().Save(discovery3))
+
+ testType2 := model.NewId()
+
+ discovery4 := &model.ClusterDiscovery{
+ ClusterName: "cluster_name",
+ Hostname: "hostname1",
+ Type: testType2,
+ }
+ store.Must(ss.ClusterDiscovery().Save(discovery4))
+
+ if result := <-ss.ClusterDiscovery().GetAll(testType1, "cluster_name"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ list := result.Data.([]*model.ClusterDiscovery)
+
+ if len(list) != 2 {
+ t.Fatal("Should only have returned 2")
+ }
+ }
+
+ if result := <-ss.ClusterDiscovery().GetAll(testType2, "cluster_name"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ list := result.Data.([]*model.ClusterDiscovery)
+
+ if len(list) != 1 {
+ t.Fatal("Should only have returned 1")
+ }
+ }
+
+ if result := <-ss.ClusterDiscovery().GetAll(model.NewId(), "cluster_name"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ list := result.Data.([]*model.ClusterDiscovery)
+
+ if len(list) != 0 {
+ t.Fatal("shouldn't be any")
+ }
+ }
+}
diff --git a/store/sqlstore/command_store.go b/store/sqlstore/command_store.go
new file mode 100644
index 000000000..f156daab1
--- /dev/null
+++ b/store/sqlstore/command_store.go
@@ -0,0 +1,226 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlCommandStore struct {
+ SqlStore
+}
+
+func NewSqlCommandStore(sqlStore SqlStore) store.CommandStore {
+ s := &SqlCommandStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ tableo := db.AddTableWithName(model.Command{}, "Commands").SetKeys(false, "Id")
+ tableo.ColMap("Id").SetMaxSize(26)
+ tableo.ColMap("Token").SetMaxSize(26)
+ tableo.ColMap("CreatorId").SetMaxSize(26)
+ tableo.ColMap("TeamId").SetMaxSize(26)
+ tableo.ColMap("Trigger").SetMaxSize(128)
+ tableo.ColMap("URL").SetMaxSize(1024)
+ tableo.ColMap("Method").SetMaxSize(1)
+ tableo.ColMap("Username").SetMaxSize(64)
+ tableo.ColMap("IconURL").SetMaxSize(1024)
+ tableo.ColMap("AutoCompleteDesc").SetMaxSize(1024)
+ tableo.ColMap("AutoCompleteHint").SetMaxSize(1024)
+ tableo.ColMap("DisplayName").SetMaxSize(64)
+ tableo.ColMap("Description").SetMaxSize(128)
+ }
+
+ return s
+}
+
+func (s SqlCommandStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_command_team_id", "Commands", "TeamId")
+ s.CreateIndexIfNotExists("idx_command_update_at", "Commands", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_command_create_at", "Commands", "CreateAt")
+ s.CreateIndexIfNotExists("idx_command_delete_at", "Commands", "DeleteAt")
+}
+
+func (s SqlCommandStore) Save(command *model.Command) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(command.Id) > 0 {
+ result.Err = model.NewAppError("SqlCommandStore.Save", "store.sql_command.save.saving_overwrite.app_error", nil, "id="+command.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ command.PreSave()
+ if result.Err = command.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(command); err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.Save", "store.sql_command.save.saving.app_error", nil, "id="+command.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = command
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var command model.Command
+
+ if err := s.GetReplica().SelectOne(&command, "SELECT * FROM Commands WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id}); err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.Get", "store.sql_command.save.get.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = &command
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) GetByTeam(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var commands []*model.Command
+
+ if _, err := s.GetReplica().Select(&commands, "SELECT * FROM Commands WHERE TeamId = :TeamId AND DeleteAt = 0", map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.GetByTeam", "store.sql_command.save.get_team.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = commands
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) Delete(commandId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("Update Commands SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :Id", map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": commandId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.Delete", "store.sql_command.save.delete.app_error", nil, "id="+commandId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) PermanentDeleteByTeam(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM Commands WHERE TeamId = :TeamId", map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.DeleteByTeam", "store.sql_command.save.delete_perm.app_error", nil, "id="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) PermanentDeleteByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM Commands WHERE CreatorId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.DeleteByUser", "store.sql_command.save.delete_perm.app_error", nil, "id="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandStore) Update(cmd *model.Command) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ cmd.UpdateAt = model.GetMillis()
+
+ if _, err := s.GetMaster().Update(cmd); err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.Update", "store.sql_command.save.update.app_error", nil, "id="+cmd.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = cmd
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
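+// AnalyticsCommandCount counts non-deleted commands, restricted to a single team when teamId is non-empty.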
+func (s SqlCommandStore) AnalyticsCommandCount(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ COUNT(*)
+ FROM
+ Commands
+ WHERE
+ DeleteAt = 0`
+
+ if len(teamId) > 0 {
+ query += " AND TeamId = :TeamId"
+ }
+
+ if c, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlCommandStore.AnalyticsCommandCount", "store.sql_command.analytics_command_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = c
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/command_store_test.go b/store/sqlstore/command_store_test.go
new file mode 100644
index 000000000..85eed64cc
--- /dev/null
+++ b/store/sqlstore/command_store_test.go
@@ -0,0 +1,221 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+)
+
+func TestCommandStoreSave(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ if err := (<-ss.Command().Save(&o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Command().Save(&o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+}
+
+func TestCommandStoreGet(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Command).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned command")
+ }
+ }
+
+ if err := (<-ss.Command().Get("123")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestCommandStoreGetByTeam(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().GetByTeam(o1.TeamId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.([]*model.Command)[0].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned command")
+ }
+ }
+
+ if result := <-ss.Command().GetByTeam("123"); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.Command)) != 0 {
+ t.Fatal("no commands should have returned")
+ }
+ }
+}
+
+func TestCommandStoreDelete(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Command).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned command")
+ }
+ }
+
+ if r2 := <-ss.Command().Delete(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Command().Get(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestCommandStoreDeleteByTeam(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Command).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned command")
+ }
+ }
+
+ if r2 := <-ss.Command().PermanentDeleteByTeam(o1.TeamId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Command().Get(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestCommandStoreDeleteByUser(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Command).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned command")
+ }
+ }
+
+ if r2 := <-ss.Command().PermanentDeleteByUser(o1.CreatorId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Command().Get(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestCommandStoreUpdate(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ o1.Token = model.NewId()
+
+ if r2 := <-ss.Command().Update(o1); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+}
+
+func TestCommandCount(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Command{}
+ o1.CreatorId = model.NewId()
+ o1.Method = model.COMMAND_METHOD_POST
+ o1.TeamId = model.NewId()
+ o1.URL = "http://nowhere.com/"
+ o1.Trigger = "trigger"
+
+ o1 = (<-ss.Command().Save(o1)).Data.(*model.Command)
+
+ if r1 := <-ss.Command().AnalyticsCommandCount(""); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) == 0 {
+ t.Fatal("should be at least 1 command")
+ }
+ }
+
+ if r2 := <-ss.Command().AnalyticsCommandCount(o1.TeamId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if r2.Data.(int64) != 1 {
+ t.Fatal("should be 1 command")
+ }
+ }
+}
diff --git a/store/sqlstore/command_webhook_store.go b/store/sqlstore/command_webhook_store.go
new file mode 100644
index 000000000..dc1ad0732
--- /dev/null
+++ b/store/sqlstore/command_webhook_store.go
@@ -0,0 +1,125 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+
+ l4g "github.com/alecthomas/log4go"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlCommandWebhookStore struct {
+ SqlStore
+}
+
+func NewSqlCommandWebhookStore(sqlStore SqlStore) store.CommandWebhookStore {
+ s := &SqlCommandWebhookStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ tablec := db.AddTableWithName(model.CommandWebhook{}, "CommandWebhooks").SetKeys(false, "Id")
+ tablec.ColMap("Id").SetMaxSize(26)
+ tablec.ColMap("CommandId").SetMaxSize(26)
+ tablec.ColMap("UserId").SetMaxSize(26)
+ tablec.ColMap("ChannelId").SetMaxSize(26)
+ tablec.ColMap("RootId").SetMaxSize(26)
+ tablec.ColMap("ParentId").SetMaxSize(26)
+ }
+
+ return s
+}
+
+func (s SqlCommandWebhookStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_command_webhook_create_at", "CommandWebhooks", "CreateAt")
+}
+
+func (s SqlCommandWebhookStore) Save(webhook *model.CommandWebhook) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(webhook.Id) > 0 {
+ result.Err = model.NewAppError("SqlCommandWebhookStore.Save", "store.sql_command_webhooks.save.existing.app_error", nil, "id="+webhook.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ webhook.PreSave()
+ if result.Err = webhook.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(webhook); err != nil {
+ result.Err = model.NewAppError("SqlCommandWebhookStore.Save", "store.sql_command_webhooks.save.app_error", nil, "id="+webhook.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = webhook
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlCommandWebhookStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhook model.CommandWebhook
+
+ exptime := model.GetMillis() - model.COMMAND_WEBHOOK_LIFETIME
+ if err := s.GetReplica().SelectOne(&webhook, "SELECT * FROM CommandWebhooks WHERE Id = :Id AND CreateAt > :ExpTime", map[string]interface{}{"Id": id, "ExpTime": exptime}); err != nil {
+ result.Err = model.NewAppError("SqlCommandWebhookStore.Get", "store.sql_command_webhooks.get.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ if err == sql.ErrNoRows {
+ result.Err.StatusCode = http.StatusNotFound
+ }
+ }
+
+ result.Data = &webhook
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
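+// TryUse atomically increments the webhook's use count and returns an error once the use limit has been reached.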
+func (s SqlCommandWebhookStore) TryUse(id string, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if sqlResult, err := s.GetMaster().Exec("UPDATE CommandWebhooks SET UseCount = UseCount + 1 WHERE Id = :Id AND UseCount < :UseLimit", map[string]interface{}{"Id": id, "UseLimit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlCommandWebhookStore.TryUse", "store.sql_command_webhooks.try_use.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ } else if rows, _ := sqlResult.RowsAffected(); rows == 0 {
+ result.Err = model.NewAppError("SqlCommandWebhookStore.TryUse", "store.sql_command_webhooks.try_use.invalid.app_error", nil, "id="+id, http.StatusBadRequest)
+ }
+
+ result.Data = id
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
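+// Cleanup permanently deletes command webhooks older than COMMAND_WEBHOOK_LIFETIME.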
+func (s SqlCommandWebhookStore) Cleanup() {
+ l4g.Debug("Cleaning up command webhook store.")
+ exptime := model.GetMillis() - model.COMMAND_WEBHOOK_LIFETIME
+ if _, err := s.GetMaster().Exec("DELETE FROM CommandWebhooks WHERE CreateAt < :ExpTime", map[string]interface{}{"ExpTime": exptime}); err != nil {
+ l4g.Error("Unable to cleanup command webhook store.")
+ }
+}
diff --git a/store/sqlstore/command_webhook_store_test.go b/store/sqlstore/command_webhook_store_test.go
new file mode 100644
index 000000000..cc286757f
--- /dev/null
+++ b/store/sqlstore/command_webhook_store_test.go
@@ -0,0 +1,65 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+)
+
+func TestCommandWebhookStore(t *testing.T) {
+ ss := Setup()
+
+ cws := ss.CommandWebhook()
+
+ h1 := &model.CommandWebhook{}
+ h1.CommandId = model.NewId()
+ h1.UserId = model.NewId()
+ h1.ChannelId = model.NewId()
+ h1 = (<-cws.Save(h1)).Data.(*model.CommandWebhook)
+
+ if r1 := <-cws.Get(h1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if *r1.Data.(*model.CommandWebhook) != *h1 {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if err := (<-cws.Get("123")).Err; err.StatusCode != http.StatusNotFound {
+ t.Fatal("Should have set the status as not found for missing id")
+ }
+
+ h2 := &model.CommandWebhook{}
+ h2.CreateAt = model.GetMillis() - 2*model.COMMAND_WEBHOOK_LIFETIME
+ h2.CommandId = model.NewId()
+ h2.UserId = model.NewId()
+ h2.ChannelId = model.NewId()
+ h2 = (<-cws.Save(h2)).Data.(*model.CommandWebhook)
+
+ if err := (<-cws.Get(h2.Id)).Err; err == nil || err.StatusCode != http.StatusNotFound {
+ t.Fatal("Should have set the status as not found for expired webhook")
+ }
+
+ cws.Cleanup()
+
+ if err := (<-cws.Get(h1.Id)).Err; err != nil {
+ t.Fatal("Should have no error getting unexpired webhook")
+ }
+
+ if err := (<-cws.Get(h2.Id)).Err; err == nil || err.StatusCode != http.StatusNotFound {
+ t.Fatal("Should have set the status as not found for expired webhook")
+ }
+
+ if err := (<-cws.TryUse(h1.Id, 1)).Err; err != nil {
+ t.Fatal("Should be able to use webhook once")
+ }
+
+ if err := (<-cws.TryUse(h1.Id, 1)).Err; err == nil || err.StatusCode != http.StatusBadRequest {
+ t.Fatal("Should be able to use webhook once")
+ }
+}
diff --git a/store/sqlstore/compliance_store.go b/store/sqlstore/compliance_store.go
new file mode 100644
index 000000000..95da94673
--- /dev/null
+++ b/store/sqlstore/compliance_store.go
@@ -0,0 +1,267 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+ "strconv"
+ "strings"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlComplianceStore struct {
+ SqlStore
+}
+
+func NewSqlComplianceStore(sqlStore SqlStore) store.ComplianceStore {
+ s := &SqlComplianceStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Compliance{}, "Compliances").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("Status").SetMaxSize(64)
+ table.ColMap("Desc").SetMaxSize(512)
+ table.ColMap("Type").SetMaxSize(64)
+ table.ColMap("Keywords").SetMaxSize(512)
+ table.ColMap("Emails").SetMaxSize(1024)
+ }
+
+ return s
+}
+
+func (s SqlComplianceStore) CreateIndexesIfNotExists() {
+}
+
+func (s SqlComplianceStore) Save(compliance *model.Compliance) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ compliance.PreSave()
+ if result.Err = compliance.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(compliance); err != nil {
+ result.Err = model.NewAppError("SqlComplianceStore.Save", "store.sql_compliance.save.saving.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = compliance
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlComplianceStore) Update(compliance *model.Compliance) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if result.Err = compliance.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := us.GetMaster().Update(compliance); err != nil {
+ result.Err = model.NewAppError("SqlComplianceStore.Update", "store.sql_compliance.save.saving.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = compliance
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlComplianceStore) GetAll(offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := "SELECT * FROM Compliances ORDER BY CreateAt DESC LIMIT :Limit OFFSET :Offset"
+
+ var compliances model.Compliances
+ if _, err := s.GetReplica().Select(&compliances, query, map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlComplianceStore.Get", "store.sql_compliance.get.finding.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = compliances
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlComplianceStore) Get(id string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := us.GetReplica().Get(model.Compliance{}, id); err != nil {
+ result.Err = model.NewAppError("SqlComplianceStore.Get", "store.sql_compliance.get.finding.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlComplianceStore.Get", "store.sql_compliance.get.finding.app_error", nil, err.Error(), http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.Compliance)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
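+// ComplianceExport builds LIKE filters for the job's keywords and equality filters for its emails,
+// then unions posts from team channels and direct message channels within the job's time window,
+// ordered by create time and capped at 30000 rows.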
+func (s SqlComplianceStore) ComplianceExport(job *model.Compliance) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ props := map[string]interface{}{"StartTime": job.StartAt, "EndTime": job.EndAt}
+
+ keywordQuery := ""
+ keywords := strings.Fields(strings.TrimSpace(strings.ToLower(strings.Replace(job.Keywords, ",", " ", -1))))
+ if len(keywords) > 0 {
+
+ keywordQuery = "AND ("
+
+ for index, keyword := range keywords {
+ if index >= 1 {
+ keywordQuery += " OR LOWER(Posts.Message) LIKE :Keyword" + strconv.Itoa(index)
+ } else {
+ keywordQuery += "LOWER(Posts.Message) LIKE :Keyword" + strconv.Itoa(index)
+ }
+
+ props["Keyword"+strconv.Itoa(index)] = "%" + keyword + "%"
+ }
+
+ keywordQuery += ")"
+ }
+
+ emailQuery := ""
+ emails := strings.Fields(strings.TrimSpace(strings.ToLower(strings.Replace(job.Emails, ",", " ", -1))))
+ if len(emails) > 0 {
+
+ emailQuery = "AND ("
+
+ for index, email := range emails {
+ if index >= 1 {
+ emailQuery += " OR Users.Email = :Email" + strconv.Itoa(index)
+ } else {
+ emailQuery += "Users.Email = :Email" + strconv.Itoa(index)
+ }
+
+ props["Email"+strconv.Itoa(index)] = email
+ }
+
+ emailQuery += ")"
+ }
+
+ query :=
+ `(SELECT
+ Teams.Name AS TeamName,
+ Teams.DisplayName AS TeamDisplayName,
+ Channels.Name AS ChannelName,
+ Channels.DisplayName AS ChannelDisplayName,
+ Users.Username AS UserUsername,
+ Users.Email AS UserEmail,
+ Users.Nickname AS UserNickname,
+ Posts.Id AS PostId,
+ Posts.CreateAt AS PostCreateAt,
+ Posts.UpdateAt AS PostUpdateAt,
+ Posts.DeleteAt AS PostDeleteAt,
+ Posts.RootId AS PostRootId,
+ Posts.ParentId AS PostParentId,
+ Posts.OriginalId AS PostOriginalId,
+ Posts.Message AS PostMessage,
+ Posts.Type AS PostType,
+ Posts.Props AS PostProps,
+ Posts.Hashtags AS PostHashtags,
+ Posts.FileIds AS PostFileIds
+ FROM
+ Teams,
+ Channels,
+ Users,
+ Posts
+ WHERE
+ Teams.Id = Channels.TeamId
+ AND Posts.ChannelId = Channels.Id
+ AND Posts.UserId = Users.Id
+ AND Posts.CreateAt > :StartTime
+ AND Posts.CreateAt <= :EndTime
+ ` + emailQuery + `
+ ` + keywordQuery + `)
+ UNION ALL
+ (SELECT
+ 'direct-messages' AS TeamName,
+ 'Direct Messages' AS TeamDisplayName,
+ Channels.Name AS ChannelName,
+ Channels.DisplayName AS ChannelDisplayName,
+ Users.Username AS UserUsername,
+ Users.Email AS UserEmail,
+ Users.Nickname AS UserNickname,
+ Posts.Id AS PostId,
+ Posts.CreateAt AS PostCreateAt,
+ Posts.UpdateAt AS PostUpdateAt,
+ Posts.DeleteAt AS PostDeleteAt,
+ Posts.RootId AS PostRootId,
+ Posts.ParentId AS PostParentId,
+ Posts.OriginalId AS PostOriginalId,
+ Posts.Message AS PostMessage,
+ Posts.Type AS PostType,
+ Posts.Props AS PostProps,
+ Posts.Hashtags AS PostHashtags,
+ Posts.FileIds AS PostFileIds
+ FROM
+ Channels,
+ Users,
+ Posts
+ WHERE
+ Channels.TeamId = ''
+ AND Posts.ChannelId = Channels.Id
+ AND Posts.UserId = Users.Id
+ AND Posts.CreateAt > :StartTime
+ AND Posts.CreateAt <= :EndTime
+ ` + emailQuery + `
+ ` + keywordQuery + `)
+ ORDER BY PostCreateAt
+ LIMIT 30000`
+
+ var cposts []*model.CompliancePost
+
+ if _, err := s.GetReplica().Select(&cposts, query, props); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.ComplianceExport", "store.sql_post.compliance_export.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = cposts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/compliance_store_test.go b/store/sqlstore/compliance_store_test.go
new file mode 100644
index 000000000..7c7bfaf8f
--- /dev/null
+++ b/store/sqlstore/compliance_store_test.go
@@ -0,0 +1,318 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestSqlComplianceStore(t *testing.T) {
+ ss := Setup()
+
+ compliance1 := &model.Compliance{Desc: "Audit for federal subpoena case #22443", UserId: model.NewId(), Status: model.COMPLIANCE_STATUS_FAILED, StartAt: model.GetMillis() - 1, EndAt: model.GetMillis() + 1, Type: model.COMPLIANCE_TYPE_ADHOC}
+ store.Must(ss.Compliance().Save(compliance1))
+ time.Sleep(100 * time.Millisecond)
+
+ compliance2 := &model.Compliance{Desc: "Audit for federal subpoena case #11458", UserId: model.NewId(), Status: model.COMPLIANCE_STATUS_RUNNING, StartAt: model.GetMillis() - 1, EndAt: model.GetMillis() + 1, Type: model.COMPLIANCE_TYPE_ADHOC}
+ store.Must(ss.Compliance().Save(compliance2))
+ time.Sleep(100 * time.Millisecond)
+
+ c := ss.Compliance().GetAll(0, 1000)
+ result := <-c
+ compliances := result.Data.(model.Compliances)
+
+ if compliances[0].Status != model.COMPLIANCE_STATUS_RUNNING || compliance2.Id != compliances[0].Id {
+ t.Fatal()
+ }
+
+ compliance2.Status = model.COMPLIANCE_STATUS_FAILED
+ store.Must(ss.Compliance().Update(compliance2))
+
+ c = ss.Compliance().GetAll(0, 1000)
+ result = <-c
+ compliances = result.Data.(model.Compliances)
+
+ if compliances[0].Status != model.COMPLIANCE_STATUS_FAILED || compliance2.Id != compliances[0].Id {
+ t.Fatal()
+ }
+
+ c = ss.Compliance().GetAll(0, 1)
+ result = <-c
+ compliances = result.Data.(model.Compliances)
+
+ if len(compliances) != 1 {
+ t.Fatal("should only have returned 1")
+ }
+
+ c = ss.Compliance().GetAll(1, 1)
+ result = <-c
+ compliances = result.Data.(model.Compliances)
+
+ if len(compliances) != 1 {
+ t.Fatal("should only have returned 1")
+ }
+
+ rc2 := (<-ss.Compliance().Get(compliance2.Id)).Data.(*model.Compliance)
+ if rc2.Status != compliance2.Status {
+ t.Fatal()
+ }
+}
+
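+// TestComplianceExport verifies filtering by time window, by email, and by keyword, as well as the
+// chronological ordering of the exported posts.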
+func TestComplianceExport(t *testing.T) {
+ ss := Setup()
+
+ time.Sleep(100 * time.Millisecond)
+
+ t1 := &model.Team{}
+ t1.DisplayName = "DisplayName"
+ t1.Name = "zz" + model.NewId() + "b"
+ t1.Email = model.NewId() + "@nowhere.com"
+ t1.Type = model.TEAM_OPEN
+ t1 = store.Must(ss.Team().Save(t1)).(*model.Team)
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ u1 = store.Must(ss.User().Save(u1)).(*model.User)
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: t1.Id, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Username = model.NewId()
+ u2 = store.Must(ss.User().Save(u2)).(*model.User)
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: t1.Id, UserId: u2.Id}))
+
+ c1 := &model.Channel{}
+ c1.TeamId = t1.Id
+ c1.DisplayName = "Channel2"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = store.Must(ss.Channel().Save(c1)).(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = u1.Id
+ o1.CreateAt = model.GetMillis()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = store.Must(ss.Post().Save(o1)).(*model.Post)
+
+ o1a := &model.Post{}
+ o1a.ChannelId = c1.Id
+ o1a.UserId = u1.Id
+ o1a.CreateAt = o1.CreateAt + 10
+ o1a.Message = "zz" + model.NewId() + "b"
+ o1a = store.Must(ss.Post().Save(o1a)).(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c1.Id
+ o2.UserId = u1.Id
+ o2.CreateAt = o1.CreateAt + 20
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = store.Must(ss.Post().Save(o2)).(*model.Post)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = c1.Id
+ o2a.UserId = u2.Id
+ o2a.CreateAt = o1.CreateAt + 30
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a = store.Must(ss.Post().Save(o2a)).(*model.Post)
+
+ time.Sleep(100 * time.Millisecond)
+
+ cr1 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1}
+ if r1 := <-ss.Compliance().ComplianceExport(cr1); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 4 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o1.Id {
+ t.Fatal("Wrong sort")
+ }
+
+ if cposts[3].PostId != o2a.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+
+ cr2 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1, Emails: u2.Email}
+ if r1 := <-ss.Compliance().ComplianceExport(cr2); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 1 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o2a.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+
+ cr3 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1, Emails: u2.Email + ", " + u1.Email}
+ if r1 := <-ss.Compliance().ComplianceExport(cr3); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 4 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o1.Id {
+ t.Fatal("Wrong sort")
+ }
+
+ if cposts[3].PostId != o2a.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+
+ cr4 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1, Keywords: o2a.Message}
+ if r1 := <-ss.Compliance().ComplianceExport(cr4); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 1 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o2a.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+
+ cr5 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1, Keywords: o2a.Message + " " + o1.Message}
+ if r1 := <-ss.Compliance().ComplianceExport(cr5); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 2 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o1.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+
+ cr6 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o2a.CreateAt + 1, Emails: u2.Email + ", " + u1.Email, Keywords: o2a.Message + " " + o1.Message}
+ if r1 := <-ss.Compliance().ComplianceExport(cr6); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 2 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o1.Id {
+ t.Fatal("Wrong sort")
+ }
+
+ if cposts[1].PostId != o2a.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+}
+
+func TestComplianceExportDirectMessages(t *testing.T) {
+ ss := Setup()
+
+ time.Sleep(100 * time.Millisecond)
+
+ t1 := &model.Team{}
+ t1.DisplayName = "DisplayName"
+ t1.Name = "zz" + model.NewId() + "b"
+ t1.Email = model.NewId() + "@nowhere.com"
+ t1.Type = model.TEAM_OPEN
+ t1 = store.Must(ss.Team().Save(t1)).(*model.Team)
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ u1 = store.Must(ss.User().Save(u1)).(*model.User)
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: t1.Id, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Username = model.NewId()
+ u2 = store.Must(ss.User().Save(u2)).(*model.User)
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: t1.Id, UserId: u2.Id}))
+
+ c1 := &model.Channel{}
+ c1.TeamId = t1.Id
+ c1.DisplayName = "Channel2"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = store.Must(ss.Channel().Save(c1)).(*model.Channel)
+
+ cDM := store.Must(ss.Channel().CreateDirectChannel(u1.Id, u2.Id)).(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = u1.Id
+ o1.CreateAt = model.GetMillis()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = store.Must(ss.Post().Save(o1)).(*model.Post)
+
+ o1a := &model.Post{}
+ o1a.ChannelId = c1.Id
+ o1a.UserId = u1.Id
+ o1a.CreateAt = o1.CreateAt + 10
+ o1a.Message = "zz" + model.NewId() + "b"
+ o1a = store.Must(ss.Post().Save(o1a)).(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c1.Id
+ o2.UserId = u1.Id
+ o2.CreateAt = o1.CreateAt + 20
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = store.Must(ss.Post().Save(o2)).(*model.Post)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = c1.Id
+ o2a.UserId = u2.Id
+ o2a.CreateAt = o1.CreateAt + 30
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a = store.Must(ss.Post().Save(o2a)).(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = cDM.Id
+ o3.UserId = u1.Id
+ o3.CreateAt = o1.CreateAt + 40
+ o3.Message = "zz" + model.NewId() + "b"
+ o3 = store.Must(ss.Post().Save(o3)).(*model.Post)
+
+ time.Sleep(100 * time.Millisecond)
+
+ cr1 := &model.Compliance{Desc: "test" + model.NewId(), StartAt: o1.CreateAt - 1, EndAt: o3.CreateAt + 1, Emails: u1.Email}
+ if r1 := <-ss.Compliance().ComplianceExport(cr1); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ cposts := r1.Data.([]*model.CompliancePost)
+
+ if len(cposts) != 4 {
+ t.Fatal("return wrong results length")
+ }
+
+ if cposts[0].PostId != o1.Id {
+ t.Fatal("Wrong sort")
+ }
+
+ if cposts[len(cposts)-1].PostId != o3.Id {
+ t.Fatal("Wrong sort")
+ }
+ }
+}
diff --git a/store/sqlstore/emoji_store.go b/store/sqlstore/emoji_store.go
new file mode 100644
index 000000000..5842af2f3
--- /dev/null
+++ b/store/sqlstore/emoji_store.go
@@ -0,0 +1,212 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ EMOJI_CACHE_SIZE = 5000
+ EMOJI_CACHE_SEC = 1800 // 30 mins
+)
+
+var emojiCache *utils.Cache = utils.NewLru(EMOJI_CACHE_SIZE)
+
+type SqlEmojiStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
+func NewSqlEmojiStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.EmojiStore {
+ s := &SqlEmojiStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Emoji{}, "Emoji").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("CreatorId").SetMaxSize(26)
+ table.ColMap("Name").SetMaxSize(64)
+
+ table.SetUniqueTogether("Name", "DeleteAt")
+ }
+
+ return s
+}
+
+func (es SqlEmojiStore) CreateIndexesIfNotExists() {
+ es.CreateIndexIfNotExists("idx_emoji_update_at", "Emoji", "UpdateAt")
+ es.CreateIndexIfNotExists("idx_emoji_create_at", "Emoji", "CreateAt")
+ es.CreateIndexIfNotExists("idx_emoji_delete_at", "Emoji", "DeleteAt")
+}
+
+func (es SqlEmojiStore) Save(emoji *model.Emoji) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ emoji.PreSave()
+ if result.Err = emoji.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := es.GetMaster().Insert(emoji); err != nil {
+ result.Err = model.NewAppError("SqlEmojiStore.Save", "store.sql_emoji.save.app_error", nil, "id="+emoji.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = emoji
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
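+// Get fetches a non-deleted emoji by id; when allowFromCache is true it is served from and added to
+// the LRU cache with an EMOJI_CACHE_SEC expiry.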
+func (es SqlEmojiStore) Get(id string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := emojiCache.Get(id); ok {
+ if es.metrics != nil {
+ es.metrics.IncrementMemCacheHitCounter("Emoji")
+ }
+ result.Data = cacheItem.(*model.Emoji)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if es.metrics != nil {
+ es.metrics.IncrementMemCacheMissCounter("Emoji")
+ }
+ }
+ } else {
+ if es.metrics != nil {
+ es.metrics.IncrementMemCacheMissCounter("Emoji")
+ }
+ }
+
+ var emoji *model.Emoji
+
+ if err := es.GetReplica().SelectOne(&emoji,
+ `SELECT
+ *
+ FROM
+ Emoji
+ WHERE
+ Id = :Id
+ AND DeleteAt = 0`, map[string]interface{}{"Id": id}); err != nil {
+ result.Err = model.NewAppError("SqlEmojiStore.Get", "store.sql_emoji.get.app_error", nil, "id="+id+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Data = emoji
+
+ if allowFromCache {
+ emojiCache.AddWithExpiresInSecs(id, emoji, EMOJI_CACHE_SEC)
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (es SqlEmojiStore) GetByName(name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var emoji *model.Emoji
+
+ if err := es.GetReplica().SelectOne(&emoji,
+ `SELECT
+ *
+ FROM
+ Emoji
+ WHERE
+ Name = :Name
+ AND DeleteAt = 0`, map[string]interface{}{"Name": name}); err != nil {
+ result.Err = model.NewAppError("SqlEmojiStore.GetByName", "store.sql_emoji.get_by_name.app_error", nil, "name="+name+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = emoji
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (es SqlEmojiStore) GetList(offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var emoji []*model.Emoji
+
+ if _, err := es.GetReplica().Select(&emoji,
+ `SELECT
+ *
+ FROM
+ Emoji
+ WHERE
+ DeleteAt = 0
+ LIMIT :Limit OFFSET :Offset`, map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlEmojiStore.GetList", "store.sql_emoji.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = emoji
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
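+// Delete soft-deletes the emoji by setting DeleteAt/UpdateAt and evicts it from the emoji cache.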
+func (es SqlEmojiStore) Delete(id string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if sqlResult, err := es.GetMaster().Exec(
+ `Update
+ Emoji
+ SET
+ DeleteAt = :DeleteAt,
+ UpdateAt = :UpdateAt
+ WHERE
+ Id = :Id
+ AND DeleteAt = 0`, map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": id}); err != nil {
+ result.Err = model.NewAppError("SqlEmojiStore.Delete", "store.sql_emoji.delete.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ } else if rows, _ := sqlResult.RowsAffected(); rows == 0 {
+ result.Err = model.NewAppError("SqlEmojiStore.Delete", "store.sql_emoji.delete.no_results", nil, "id="+id+", err="+err.Error(), http.StatusBadRequest)
+ }
+
+ emojiCache.Remove(id)
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/emoji_store_test.go b/store/sqlstore/emoji_store_test.go
new file mode 100644
index 000000000..39673a503
--- /dev/null
+++ b/store/sqlstore/emoji_store_test.go
@@ -0,0 +1,176 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestEmojiSaveDelete(t *testing.T) {
+ ss := Setup()
+
+ emoji1 := &model.Emoji{
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ }
+
+ if result := <-ss.Emoji().Save(emoji1); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if len(emoji1.Id) != 26 {
+ t.Fatal("should've set id for emoji")
+ }
+
+ emoji2 := model.Emoji{
+ CreatorId: model.NewId(),
+ Name: emoji1.Name,
+ }
+ if result := <-ss.Emoji().Save(&emoji2); result.Err == nil {
+ t.Fatal("shouldn't be able to save emoji with duplicate name")
+ }
+
+ if result := <-ss.Emoji().Delete(emoji1.Id, time.Now().Unix()); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.Emoji().Save(&emoji2); result.Err != nil {
+ t.Fatal("should be able to save emoji with duplicate name now that original has been deleted", result.Err)
+ }
+
+ if result := <-ss.Emoji().Delete(emoji2.Id, time.Now().Unix()+1); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestEmojiGet(t *testing.T) {
+ ss := Setup()
+
+ emojis := []model.Emoji{
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ }
+
+ for i, emoji := range emojis {
+ emojis[i] = *store.Must(ss.Emoji().Save(&emoji)).(*model.Emoji)
+ }
+ defer func() {
+ for _, emoji := range emojis {
+ store.Must(ss.Emoji().Delete(emoji.Id, time.Now().Unix()))
+ }
+ }()
+
+ for _, emoji := range emojis {
+ if result := <-ss.Emoji().Get(emoji.Id, false); result.Err != nil {
+ t.Fatalf("failed to get emoji with id %v: %v", emoji.Id, result.Err)
+ }
+ }
+
+ for _, emoji := range emojis {
+ if result := <-ss.Emoji().Get(emoji.Id, true); result.Err != nil {
+ t.Fatalf("failed to get emoji with id %v: %v", emoji.Id, result.Err)
+ }
+ }
+
+ for _, emoji := range emojis {
+ if result := <-ss.Emoji().Get(emoji.Id, true); result.Err != nil {
+ t.Fatalf("failed to get emoji with id %v: %v", emoji.Id, result.Err)
+ }
+ }
+}
+
+func TestEmojiGetByName(t *testing.T) {
+ ss := Setup()
+
+ emojis := []model.Emoji{
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ }
+
+ for i, emoji := range emojis {
+ emojis[i] = *store.Must(ss.Emoji().Save(&emoji)).(*model.Emoji)
+ }
+ defer func() {
+ for _, emoji := range emojis {
+ store.Must(ss.Emoji().Delete(emoji.Id, time.Now().Unix()))
+ }
+ }()
+
+ for _, emoji := range emojis {
+ if result := <-ss.Emoji().GetByName(emoji.Name); result.Err != nil {
+ t.Fatalf("failed to get emoji with name %v: %v", emoji.Name, result.Err)
+ }
+ }
+}
+
+func TestEmojiGetList(t *testing.T) {
+ ss := Setup()
+
+ emojis := []model.Emoji{
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ {
+ CreatorId: model.NewId(),
+ Name: model.NewId(),
+ },
+ }
+
+ for i, emoji := range emojis {
+ emojis[i] = *store.Must(ss.Emoji().Save(&emoji)).(*model.Emoji)
+ }
+ defer func() {
+ for _, emoji := range emojis {
+ store.Must(ss.Emoji().Delete(emoji.Id, time.Now().Unix()))
+ }
+ }()
+
+ if result := <-ss.Emoji().GetList(0, 100); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ for _, emoji := range emojis {
+ found := false
+
+ for _, savedEmoji := range result.Data.([]*model.Emoji) {
+ if emoji.Id == savedEmoji.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatalf("failed to get emoji with id %v", emoji.Id)
+ }
+ }
+ }
+}
diff --git a/store/sqlstore/file_info_store.go b/store/sqlstore/file_info_store.go
new file mode 100644
index 000000000..cc8b4fb2f
--- /dev/null
+++ b/store/sqlstore/file_info_store.go
@@ -0,0 +1,317 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlFileInfoStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
+const (
+ FILE_INFO_CACHE_SIZE = 25000
+ FILE_INFO_CACHE_SEC = 1800 // 30 minutes
+)
+
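+ // fileInfoCache maps a post id to the []*model.FileInfo attached to that post.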
+var fileInfoCache *utils.Cache = utils.NewLru(FILE_INFO_CACHE_SIZE)
+
+func ClearFileCaches() {
+ fileInfoCache.Purge()
+}
+
+func NewSqlFileInfoStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.FileInfoStore {
+ s := &SqlFileInfoStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.FileInfo{}, "FileInfo").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("CreatorId").SetMaxSize(26)
+ table.ColMap("PostId").SetMaxSize(26)
+ table.ColMap("Path").SetMaxSize(512)
+ table.ColMap("ThumbnailPath").SetMaxSize(512)
+ table.ColMap("PreviewPath").SetMaxSize(512)
+ table.ColMap("Name").SetMaxSize(256)
+ table.ColMap("Extension").SetMaxSize(64)
+ table.ColMap("MimeType").SetMaxSize(256)
+ }
+
+ return s
+}
+
+func (fs SqlFileInfoStore) CreateIndexesIfNotExists() {
+ fs.CreateIndexIfNotExists("idx_fileinfo_update_at", "FileInfo", "UpdateAt")
+ fs.CreateIndexIfNotExists("idx_fileinfo_create_at", "FileInfo", "CreateAt")
+ fs.CreateIndexIfNotExists("idx_fileinfo_delete_at", "FileInfo", "DeleteAt")
+ fs.CreateIndexIfNotExists("idx_fileinfo_postid_at", "FileInfo", "PostId")
+}
+
+func (fs SqlFileInfoStore) Save(info *model.FileInfo) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ info.PreSave()
+ if result.Err = info.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := fs.GetMaster().Insert(info); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.Save", "store.sql_file_info.save.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = info
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ info := &model.FileInfo{}
+
+ if err := fs.GetReplica().SelectOne(info,
+ `SELECT
+ *
+ FROM
+ FileInfo
+ WHERE
+ Id = :Id
+ AND DeleteAt = 0`, map[string]interface{}{"Id": id}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlFileInfoStore.Get", "store.sql_file_info.get.app_error", nil, "id="+id+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlFileInfoStore.Get", "store.sql_file_info.get.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = info
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) GetByPath(path string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ info := &model.FileInfo{}
+
+ if err := fs.GetReplica().SelectOne(info,
+ `SELECT
+ *
+ FROM
+ FileInfo
+ WHERE
+ Path = :Path
+ AND DeleteAt = 0
+ LIMIT 1`, map[string]interface{}{"Path": path}); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.GetByPath", "store.sql_file_info.get_by_path.app_error", nil, "path="+path+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = info
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) InvalidateFileInfosForPostCache(postId string) {
+ fileInfoCache.Remove(postId)
+}
+
+func (fs SqlFileInfoStore) GetForPost(postId string, readFromMaster bool, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
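+ // Serve from the in-memory cache when allowed, recording a hit or miss against the metrics interface.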
+ if allowFromCache {
+ if cacheItem, ok := fileInfoCache.Get(postId); ok {
+ if fs.metrics != nil {
+ fs.metrics.IncrementMemCacheHitCounter("File Info Cache")
+ }
+
+ result.Data = cacheItem.([]*model.FileInfo)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if fs.metrics != nil {
+ fs.metrics.IncrementMemCacheMissCounter("File Info Cache")
+ }
+ }
+ } else {
+ if fs.metrics != nil {
+ fs.metrics.IncrementMemCacheMissCounter("File Info Cache")
+ }
+ }
+
+ var infos []*model.FileInfo
+
+ dbmap := fs.GetReplica()
+
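+ // readFromMaster lets callers bypass a potentially lagging read replica, e.g. immediately after the rows were written.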
+ if readFromMaster {
+ dbmap = fs.GetMaster()
+ }
+
+ if _, err := dbmap.Select(&infos,
+ `SELECT
+ *
+ FROM
+ FileInfo
+ WHERE
+ PostId = :PostId
+ AND DeleteAt = 0
+ ORDER BY
+ CreateAt`, map[string]interface{}{"PostId": postId}); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.GetForPost",
+ "store.sql_file_info.get_for_post.app_error", nil, "post_id="+postId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ if len(infos) > 0 {
+ fileInfoCache.AddWithExpiresInSecs(postId, infos, FILE_INFO_CACHE_SEC)
+ }
+
+ result.Data = infos
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) AttachToPost(fileId, postId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
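+ // The PostId = '' guard means a file can only be attached once; already-attached files are left untouched.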
+ if _, err := fs.GetMaster().Exec(
+ `UPDATE
+ FileInfo
+ SET
+ PostId = :PostId
+ WHERE
+ Id = :Id
+ AND PostId = ''`, map[string]interface{}{"PostId": postId, "Id": fileId}); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.AttachToPost",
+ "store.sql_file_info.attach_to_post.app_error", nil, "post_id="+postId+", file_id="+fileId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) DeleteForPost(postId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := fs.GetMaster().Exec(
+ `UPDATE
+ FileInfo
+ SET
+ DeleteAt = :DeleteAt
+ WHERE
+ PostId = :PostId`, map[string]interface{}{"DeleteAt": model.GetMillis(), "PostId": postId}); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.DeleteForPost",
+ "store.sql_file_info.delete_for_post.app_error", nil, "post_id="+postId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = postId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (fs SqlFileInfoStore) PermanentDelete(fileId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := fs.GetMaster().Exec(
+ `DELETE FROM
+ FileInfo
+ WHERE
+ Id = :FileId`, map[string]interface{}{"FileId": fileId}); err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.PermanentDelete",
+ "store.sql_file_info.permanent_delete.app_error", nil, "file_id="+fileId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlFileInfoStore) PermanentDeleteBatch(endTime int64, limit int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var query string
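+ // MySQL supports LIMIT directly on DELETE; Postgres needs the limited id set selected in a subquery first.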
+ if *utils.Cfg.SqlSettings.DriverName == "postgres" {
+ query = "DELETE from FileInfo WHERE Id = any (array (SELECT Id FROM FileInfo WHERE CreateAt < :EndTime LIMIT :Limit))"
+ } else {
+ query = "DELETE from FileInfo WHERE CreateAt < :EndTime LIMIT :Limit"
+ }
+
+ sqlResult, err := s.GetMaster().Exec(query, map[string]interface{}{"EndTime": endTime, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.PermanentDeleteBatch", "store.sql_file_info.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ } else {
+ rowsAffected, err1 := sqlResult.RowsAffected()
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlFileInfoStore.PermanentDeleteBatch", "store.sql_file_info.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ result.Data = int64(0)
+ } else {
+ result.Data = rowsAffected
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/file_info_store_test.go b/store/sqlstore/file_info_store_test.go
new file mode 100644
index 000000000..626fe8c6a
--- /dev/null
+++ b/store/sqlstore/file_info_store_test.go
@@ -0,0 +1,300 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestFileInfoSaveGet(t *testing.T) {
+ ss := Setup()
+
+ info := &model.FileInfo{
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ }
+
+ if result := <-ss.FileInfo().Save(info); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.(*model.FileInfo); len(returned.Id) == 0 {
+ t.Fatal("should've assigned an id to FileInfo")
+ } else {
+ info = returned
+ }
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info.Id)
+ }()
+
+ if result := <-ss.FileInfo().Get(info.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.(*model.FileInfo); returned.Id != info.Id {
+ t.Log(info)
+ t.Log(returned)
+ t.Fatal("should've returned correct FileInfo")
+ }
+
+ info2 := store.Must(ss.FileInfo().Save(&model.FileInfo{
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ DeleteAt: 123,
+ })).(*model.FileInfo)
+
+ if result := <-ss.FileInfo().Get(info2.Id); result.Err == nil {
+ t.Fatal("shouldn't have gotten deleted file")
+ }
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info2.Id)
+ }()
+}
+
+func TestFileInfoSaveGetByPath(t *testing.T) {
+ ss := Setup()
+
+ info := &model.FileInfo{
+ CreatorId: model.NewId(),
+ Path: fmt.Sprintf("%v/file.txt", model.NewId()),
+ }
+
+ if result := <-ss.FileInfo().Save(info); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.(*model.FileInfo); len(returned.Id) == 0 {
+ t.Fatal("should've assigned an id to FileInfo")
+ } else {
+ info = returned
+ }
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info.Id)
+ }()
+
+ if result := <-ss.FileInfo().GetByPath(info.Path); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.(*model.FileInfo); returned.Id != info.Id {
+ t.Log(info)
+ t.Log(returned)
+ t.Fatal("should've returned correct FileInfo")
+ }
+
+ info2 := store.Must(ss.FileInfo().Save(&model.FileInfo{
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ DeleteAt: 123,
+ })).(*model.FileInfo)
+
+ if result := <-ss.FileInfo().GetByPath(info2.Id); result.Err == nil {
+ t.Fatal("shouldn't have gotten deleted file")
+ }
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info2.Id)
+ }()
+}
+
+func TestFileInfoGetForPost(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ postId := model.NewId()
+
+ infos := []*model.FileInfo{
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ DeleteAt: 123,
+ },
+ {
+ PostId: model.NewId(),
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ }
+
+ for i, info := range infos {
+ infos[i] = store.Must(ss.FileInfo().Save(info)).(*model.FileInfo)
+ defer func(id string) {
+ <-ss.FileInfo().PermanentDelete(id)
+ }(infos[i].Id)
+ }
+
+ if result := <-ss.FileInfo().GetForPost(postId, true, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.([]*model.FileInfo); len(returned) != 2 {
+ t.Fatal("should've returned exactly 2 file infos")
+ }
+
+ if result := <-ss.FileInfo().GetForPost(postId, false, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.([]*model.FileInfo); len(returned) != 2 {
+ t.Fatal("should've returned exactly 2 file infos")
+ }
+
+ if result := <-ss.FileInfo().GetForPost(postId, true, true); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.([]*model.FileInfo); len(returned) != 2 {
+ t.Fatal("should've returned exactly 2 file infos")
+ }
+}
+
+func TestFileInfoAttachToPost(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ postId := model.NewId()
+
+ info1 := store.Must(ss.FileInfo().Save(&model.FileInfo{
+ CreatorId: userId,
+ Path: "file.txt",
+ })).(*model.FileInfo)
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info1.Id)
+ }()
+
+ if len(info1.PostId) != 0 {
+ t.Fatal("file shouldn't have a PostId")
+ }
+
+ if result := <-ss.FileInfo().AttachToPost(info1.Id, postId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ info1 = store.Must(ss.FileInfo().Get(info1.Id)).(*model.FileInfo)
+ }
+
+ if len(info1.PostId) == 0 {
+ t.Fatal("file should now have a PostId")
+ }
+
+ info2 := store.Must(ss.FileInfo().Save(&model.FileInfo{
+ CreatorId: userId,
+ Path: "file.txt",
+ })).(*model.FileInfo)
+ defer func() {
+ <-ss.FileInfo().PermanentDelete(info2.Id)
+ }()
+
+ if result := <-ss.FileInfo().AttachToPost(info2.Id, postId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ info2 = store.Must(ss.FileInfo().Get(info2.Id)).(*model.FileInfo)
+ }
+
+ if result := <-ss.FileInfo().GetForPost(postId, true, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if infos := result.Data.([]*model.FileInfo); len(infos) != 2 {
+ t.Fatal("should've returned exactly 2 file infos")
+ }
+}
+
+func TestFileInfoDeleteForPost(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ postId := model.NewId()
+
+ infos := []*model.FileInfo{
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ {
+ PostId: postId,
+ CreatorId: userId,
+ Path: "file.txt",
+ DeleteAt: 123,
+ },
+ {
+ PostId: model.NewId(),
+ CreatorId: userId,
+ Path: "file.txt",
+ },
+ }
+
+ for i, info := range infos {
+ infos[i] = store.Must(ss.FileInfo().Save(info)).(*model.FileInfo)
+ defer func(id string) {
+ <-ss.FileInfo().PermanentDelete(id)
+ }(infos[i].Id)
+ }
+
+ if result := <-ss.FileInfo().DeleteForPost(postId); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if infos := store.Must(ss.FileInfo().GetForPost(postId, true, false)).([]*model.FileInfo); len(infos) != 0 {
+ t.Fatal("shouldn't have returned any file infos")
+ }
+}
+
+func TestFileInfoPermanentDelete(t *testing.T) {
+ ss := Setup()
+
+ info := store.Must(ss.FileInfo().Save(&model.FileInfo{
+ PostId: model.NewId(),
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ })).(*model.FileInfo)
+
+ if result := <-ss.FileInfo().PermanentDelete(info.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestFileInfoPermanentDeleteBatch(t *testing.T) {
+ ss := Setup()
+
+ postId := model.NewId()
+
+ store.Must(ss.FileInfo().Save(&model.FileInfo{
+ PostId: postId,
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ CreateAt: 1000,
+ }))
+
+ store.Must(ss.FileInfo().Save(&model.FileInfo{
+ PostId: postId,
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ CreateAt: 1200,
+ }))
+
+ store.Must(ss.FileInfo().Save(&model.FileInfo{
+ PostId: postId,
+ CreatorId: model.NewId(),
+ Path: "file.txt",
+ CreateAt: 2000,
+ }))
+
+ if result := <-ss.FileInfo().GetForPost(postId, true, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if len(result.Data.([]*model.FileInfo)) != 3 {
+ t.Fatal("Expected 3 fileInfos")
+ }
+
+ store.Must(ss.FileInfo().PermanentDeleteBatch(1500, 1000))
+
+ if result := <-ss.FileInfo().GetForPost(postId, true, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if len(result.Data.([]*model.FileInfo)) != 1 {
+ t.Fatal("Expected 3 fileInfos")
+ }
+}
diff --git a/store/sqlstore/job_store.go b/store/sqlstore/job_store.go
new file mode 100644
index 000000000..c56f526af
--- /dev/null
+++ b/store/sqlstore/job_store.go
@@ -0,0 +1,349 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlJobStore struct {
+ SqlStore
+}
+
+func NewSqlJobStore(sqlStore SqlStore) store.JobStore {
+ s := &SqlJobStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Job{}, "Jobs").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Type").SetMaxSize(32)
+ table.ColMap("Status").SetMaxSize(32)
+ table.ColMap("Data").SetMaxSize(1024)
+ }
+
+ return s
+}
+
+func (jss SqlJobStore) CreateIndexesIfNotExists() {
+ jss.CreateIndexIfNotExists("idx_jobs_type", "Jobs", "Type")
+}
+
+func (jss SqlJobStore) Save(job *model.Job) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ if err := jss.GetMaster().Insert(job); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.Save", "store.sql_job.save.app_error", nil, "id="+job.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = job
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateOptimistically(job *model.Job, currentStatus string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
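+ // Compare-and-swap on Status: the row is only updated if it is still in currentStatus; Data reports whether a row changed.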
+ if sqlResult, err := jss.GetMaster().Exec(
+ `UPDATE
+ Jobs
+ SET
+ LastActivityAt = :LastActivityAt,
+ Status = :Status,
+ Progress = :Progress,
+ Data = :Data
+ WHERE
+ Id = :Id
+ AND
+ Status = :OldStatus`,
+ map[string]interface{}{
+ "Id": job.Id,
+ "OldStatus": currentStatus,
+ "LastActivityAt": model.GetMillis(),
+ "Status": job.Status,
+ "Data": job.DataToJson(),
+ "Progress": job.Progress,
+ }); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.UpdateOptimistically", "store.sql_job.update.app_error", nil, "id="+job.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ rows, err := sqlResult.RowsAffected()
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlJobStore.UpdateStatus", "store.sql_job.update.app_error", nil, "id="+job.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ if rows == 1 {
+ result.Data = true
+ } else {
+ result.Data = false
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateStatus(id string, status string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ job := &model.Job{
+ Id: id,
+ Status: status,
+ LastActivityAt: model.GetMillis(),
+ }
+
+ if _, err := jss.GetMaster().UpdateColumns(func(col *gorp.ColumnMap) bool {
+ return col.ColumnName == "Status" || col.ColumnName == "LastActivityAt"
+ }, job); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.UpdateStatus", "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ if result.Err == nil {
+ result.Data = job
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateStatusOptimistically(id string, currentStatus string, newStatus string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
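+ // StartAt is only stamped when the job transitions into the in-progress state.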
+ var startAtClause string
+ if newStatus == model.JOB_STATUS_IN_PROGRESS {
+ startAtClause = `StartAt = :StartAt,`
+ }
+
+ if sqlResult, err := jss.GetMaster().Exec(
+ `UPDATE
+ Jobs
+ SET `+startAtClause+`
+ Status = :NewStatus,
+ LastActivityAt = :LastActivityAt
+ WHERE
+ Id = :Id
+ AND
+ Status = :OldStatus`, map[string]interface{}{"Id": id, "OldStatus": currentStatus, "NewStatus": newStatus, "StartAt": model.GetMillis(), "LastActivityAt": model.GetMillis()}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.UpdateStatus", "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ rows, err := sqlResult.RowsAffected()
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlJobStore.UpdateStatus", "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ if rows == 1 {
+ result.Data = true
+ } else {
+ result.Data = false
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var status *model.Job
+
+ if err := jss.GetReplica().SelectOne(&status,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlJobStore.Get", "store.sql_job.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlJobStore.Get", "store.sql_job.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = status
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllPage(offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ ORDER BY
+ CreateAt DESC
+ LIMIT
+ :Limit
+ OFFSET
+ :Offset`, map[string]interface{}{"Limit": limit, "Offset": offset}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.GetAllPage", "store.sql_job.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByType(jobType string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Type = :Type
+ ORDER BY
+ CreateAt DESC`, map[string]interface{}{"Type": jobType}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.GetAllByType", "store.sql_job.get_all.app_error", nil, "Type="+jobType+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByTypePage(jobType string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Type = :Type
+ ORDER BY
+ CreateAt DESC
+ LIMIT
+ :Limit
+ OFFSET
+ :Offset`, map[string]interface{}{"Type": jobType, "Limit": limit, "Offset": offset}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.GetAllByTypePage", "store.sql_job.get_all.app_error", nil, "Type="+jobType+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByStatus(status string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Job
+
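+ // Unlike the paged getters, results are returned oldest first (CreateAt ASC).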
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Status = :Status
+ ORDER BY
+ CreateAt ASC`, map[string]interface{}{"Status": status}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.GetAllByStatus", "store.sql_job.get_all.app_error", nil, "Status="+status+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) Delete(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := jss.GetMaster().Exec(
+ `DELETE FROM
+ Jobs
+ WHERE
+ Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
+ result.Err = model.NewAppError("SqlJobStore.DeleteByType", "store.sql_job.delete.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = id
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/job_store_test.go b/store/sqlstore/job_store_test.go
new file mode 100644
index 000000000..bbeca6c3a
--- /dev/null
+++ b/store/sqlstore/job_store_test.go
@@ -0,0 +1,407 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+func TestJobSaveGet(t *testing.T) {
+ ss := Setup()
+
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.NewId(),
+ Status: model.NewId(),
+ Data: map[string]string{
+ "Processed": "0",
+ "Total": "12345",
+ "LastProcessed": "abcd",
+ },
+ }
+
+ if result := <-ss.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ defer func() {
+ <-ss.Job().Delete(job.Id)
+ }()
+
+ if result := <-ss.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.(*model.Job); received.Id != job.Id {
+ t.Fatal("received incorrect job after save")
+ } else if received.Data["Total"] != "12345" {
+ t.Fatal("data field was not retrieved successfully:", received.Data)
+ }
+}
+
+func TestJobGetAllByType(t *testing.T) {
+ ss := Setup()
+
+ jobType := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ },
+ {
+ Id: model.NewId(),
+ Type: model.NewId(),
+ },
+ }
+
+ for _, job := range jobs {
+ store.Must(ss.Job().Save(job))
+ defer ss.Job().Delete(job.Id)
+ }
+
+ if result := <-ss.Job().GetAllByType(jobType); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 2 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[0].Id && received[1].Id != jobs[0].Id {
+ t.Fatal("should've received first jobs")
+ } else if received[0].Id != jobs[1].Id && received[1].Id != jobs[1].Id {
+ t.Fatal("should've received second jobs")
+ }
+}
+
+func TestJobGetAllByTypePage(t *testing.T) {
+ ss := Setup()
+
+ jobType := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1000,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 999,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1001,
+ },
+ {
+ Id: model.NewId(),
+ Type: model.NewId(),
+ CreateAt: 1002,
+ },
+ }
+
+ for _, job := range jobs {
+ store.Must(ss.Job().Save(job))
+ defer ss.Job().Delete(job.Id)
+ }
+
+ if result := <-ss.Job().GetAllByTypePage(jobType, 0, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 2 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[2].Id {
+ t.Fatal("should've received newest job first")
+ } else if received[1].Id != jobs[0].Id {
+ t.Fatal("should've received second newest job second")
+ }
+
+ if result := <-ss.Job().GetAllByTypePage(jobType, 2, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 1 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[1].Id {
+ t.Fatal("should've received oldest job last")
+ }
+}
+
+func TestJobGetAllPage(t *testing.T) {
+ ss := Setup()
+
+ jobType := model.NewId()
+ createAtTime := model.GetMillis()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: createAtTime + 1,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: createAtTime,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: createAtTime + 2,
+ },
+ }
+
+ for _, job := range jobs {
+ store.Must(ss.Job().Save(job))
+ defer ss.Job().Delete(job.Id)
+ }
+
+ if result := <-ss.Job().GetAllPage(0, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 2 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[2].Id {
+ t.Fatal("should've received newest job first")
+ } else if received[1].Id != jobs[0].Id {
+ t.Fatal("should've received second newest job second")
+ }
+
+ if result := <-ss.Job().GetAllPage(2, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) < 1 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[1].Id {
+ t.Fatal("should've received oldest job last")
+ }
+}
+
+func TestJobGetAllByStatus(t *testing.T) {
+ ss := Setup()
+
+ jobType := model.NewId()
+ status := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1000,
+ Status: status,
+ Data: map[string]string{
+ "test": "data",
+ },
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 999,
+ Status: status,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1001,
+ Status: status,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1002,
+ Status: model.NewId(),
+ },
+ }
+
+ for _, job := range jobs {
+ store.Must(ss.Job().Save(job))
+ defer ss.Job().Delete(job.Id)
+ }
+
+ if result := <-ss.Job().GetAllByStatus(status); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 3 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[1].Id || received[1].Id != jobs[0].Id || received[2].Id != jobs[2].Id {
+ t.Fatal("should've received jobs ordered by CreateAt time")
+ } else if received[1].Data["test"] != "data" {
+ t.Fatal("should've received job data field back as saved")
+ }
+}
+
+func TestJobUpdateOptimistically(t *testing.T) {
+ ss := Setup()
+
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.JOB_TYPE_DATA_RETENTION,
+ CreateAt: model.GetMillis(),
+ Status: model.JOB_STATUS_PENDING,
+ }
+
+ if result := <-ss.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+ defer ss.Job().Delete(job.Id)
+
+ job.LastActivityAt = model.GetMillis()
+ job.Status = model.JOB_STATUS_IN_PROGRESS
+ job.Progress = 50
+ job.Data = map[string]string{
+ "Foo": "Bar",
+ }
+
+ if result := <-ss.Job().UpdateOptimistically(job, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if result.Data.(bool) {
+ t.Fatal("should have failed due to incorrect old status")
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-ss.Job().UpdateOptimistically(job, model.JOB_STATUS_PENDING); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("Should have successfully updated")
+ }
+
+ var updatedJob *model.Job
+
+ if result := <-ss.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ updatedJob = result.Data.(*model.Job)
+ }
+
+ if updatedJob.Type != job.Type || updatedJob.CreateAt != job.CreateAt || updatedJob.Status != job.Status || updatedJob.LastActivityAt <= job.LastActivityAt || updatedJob.Progress != job.Progress || updatedJob.Data["Foo"] != job.Data["Foo"] {
+ t.Fatal("Some update property was not as expected")
+ }
+ }
+
+}
+
+func TestJobUpdateStatusUpdateStatusOptimistically(t *testing.T) {
+ ss := Setup()
+
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.JOB_TYPE_DATA_RETENTION,
+ CreateAt: model.GetMillis(),
+ Status: model.JOB_STATUS_SUCCESS,
+ }
+
+ var lastUpdateAt int64
+ if result := <-ss.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ lastUpdateAt = result.Data.(*model.Job).LastActivityAt
+ }
+
+ defer ss.Job().Delete(job.Id)
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-ss.Job().UpdateStatus(job.Id, model.JOB_STATUS_PENDING); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_PENDING {
+ t.Fatal("status wasn't updated")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-ss.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_IN_PROGRESS, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(bool) {
+ t.Fatal("should be false due to incorrect original status")
+ }
+ }
+
+ if result := <-ss.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_PENDING {
+ t.Fatal("should still be pending")
+ }
+ if received.LastActivityAt != lastUpdateAt {
+ t.Fatal("last activity at shouldn't have changed")
+ }
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-ss.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_IN_PROGRESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("should have succeeded")
+ }
+ }
+
+ var startAtSet int64
+ if result := <-ss.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_IN_PROGRESS {
+ t.Fatal("should be in progress")
+ }
+ if received.StartAt == 0 {
+ t.Fatal("received should have start at set")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ startAtSet = received.StartAt
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-ss.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_IN_PROGRESS, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("should have succeeded")
+ }
+ }
+
+ if result := <-ss.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_SUCCESS {
+ t.Fatal("should be success status")
+ }
+ if received.StartAt != startAtSet {
+ t.Fatal("startAt should not have changed")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ }
+}
+
+func TestJobDelete(t *testing.T) {
+ ss := Setup()
+
+ job := store.Must(ss.Job().Save(&model.Job{
+ Id: model.NewId(),
+ })).(*model.Job)
+
+ if result := <-ss.Job().Delete(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
diff --git a/store/sqlstore/license_store.go b/store/sqlstore/license_store.go
new file mode 100644
index 000000000..5f3e91e88
--- /dev/null
+++ b/store/sqlstore/license_store.go
@@ -0,0 +1,83 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlLicenseStore struct {
+ SqlStore
+}
+
+func NewSqlLicenseStore(sqlStore SqlStore) store.LicenseStore {
+ ls := &SqlLicenseStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.LicenseRecord{}, "Licenses").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Bytes").SetMaxSize(10000)
+ }
+
+ return ls
+}
+
+func (ls SqlLicenseStore) CreateIndexesIfNotExists() {
+}
+
+func (ls SqlLicenseStore) Save(license *model.LicenseRecord) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ license.PreSave()
+ if result.Err = license.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ // Only insert if not exists
+ if err := ls.GetReplica().SelectOne(&model.LicenseRecord{}, "SELECT * FROM Licenses WHERE Id = :Id", map[string]interface{}{"Id": license.Id}); err != nil {
+ if err := ls.GetMaster().Insert(license); err != nil {
+ result.Err = model.NewAppError("SqlLicenseStore.Save", "store.sql_license.save.app_error", nil, "license_id="+license.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = license
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (ls SqlLicenseStore) Get(id string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := ls.GetReplica().Get(model.LicenseRecord{}, id); err != nil {
+ result.Err = model.NewAppError("SqlLicenseStore.Get", "store.sql_license.get.app_error", nil, "license_id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlLicenseStore.Get", "store.sql_license.get.missing.app_error", nil, "license_id="+id, http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.LicenseRecord)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/license_store_test.go b/store/sqlstore/license_store_test.go
new file mode 100644
index 000000000..053b0ede1
--- /dev/null
+++ b/store/sqlstore/license_store_test.go
@@ -0,0 +1,55 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestLicenseStoreSave(t *testing.T) {
+ ss := Setup()
+
+ l1 := model.LicenseRecord{}
+ l1.Id = model.NewId()
+ l1.Bytes = "junk"
+
+ if err := (<-ss.License().Save(&l1)).Err; err != nil {
+ t.Fatal("couldn't save license record", err)
+ }
+
+ if err := (<-ss.License().Save(&l1)).Err; err != nil {
+ t.Fatal("shouldn't fail on trying to save existing license record", err)
+ }
+
+ l1.Id = ""
+
+ if err := (<-ss.License().Save(&l1)).Err; err == nil {
+ t.Fatal("should fail on invalid license", err)
+ }
+}
+
+func TestLicenseStoreGet(t *testing.T) {
+ ss := Setup()
+
+ l1 := model.LicenseRecord{}
+ l1.Id = model.NewId()
+ l1.Bytes = "junk"
+
+ store.Must(ss.License().Save(&l1))
+
+ if r := <-ss.License().Get(l1.Id); r.Err != nil {
+ t.Fatal("couldn't get license", r.Err)
+ } else {
+ if r.Data.(*model.LicenseRecord).Bytes != l1.Bytes {
+ t.Fatal("license bytes didn't match")
+ }
+ }
+
+ if err := (<-ss.License().Get("missing")).Err; err == nil {
+ t.Fatal("should fail on get license", err)
+ }
+}
diff --git a/store/sqlstore/oauth_store.go b/store/sqlstore/oauth_store.go
new file mode 100644
index 000000000..f93f47821
--- /dev/null
+++ b/store/sqlstore/oauth_store.go
@@ -0,0 +1,570 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+ "strings"
+
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlOAuthStore struct {
+ SqlStore
+}
+
+func NewSqlOAuthStore(sqlStore SqlStore) store.OAuthStore {
+ as := &SqlOAuthStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.OAuthApp{}, "OAuthApps").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("CreatorId").SetMaxSize(26)
+ table.ColMap("ClientSecret").SetMaxSize(128)
+ table.ColMap("Name").SetMaxSize(64)
+ table.ColMap("Description").SetMaxSize(512)
+ table.ColMap("CallbackUrls").SetMaxSize(1024)
+ table.ColMap("Homepage").SetMaxSize(256)
+ table.ColMap("IconURL").SetMaxSize(512)
+
+ tableAuth := db.AddTableWithName(model.AuthData{}, "OAuthAuthData").SetKeys(false, "Code")
+ tableAuth.ColMap("UserId").SetMaxSize(26)
+ tableAuth.ColMap("ClientId").SetMaxSize(26)
+ tableAuth.ColMap("Code").SetMaxSize(128)
+ tableAuth.ColMap("RedirectUri").SetMaxSize(256)
+ tableAuth.ColMap("State").SetMaxSize(128)
+ tableAuth.ColMap("Scope").SetMaxSize(128)
+
+ tableAccess := db.AddTableWithName(model.AccessData{}, "OAuthAccessData").SetKeys(false, "Token")
+ tableAccess.ColMap("ClientId").SetMaxSize(26)
+ tableAccess.ColMap("UserId").SetMaxSize(26)
+ tableAccess.ColMap("Token").SetMaxSize(26)
+ tableAccess.ColMap("RefreshToken").SetMaxSize(26)
+ tableAccess.ColMap("RedirectUri").SetMaxSize(256)
+ tableAccess.ColMap("Scope").SetMaxSize(128)
+ tableAccess.SetUniqueTogether("ClientId", "UserId")
+ }
+
+ return as
+}
+
+func (as SqlOAuthStore) CreateIndexesIfNotExists() {
+ as.CreateIndexIfNotExists("idx_oauthapps_creator_id", "OAuthApps", "CreatorId")
+ as.CreateIndexIfNotExists("idx_oauthaccessdata_client_id", "OAuthAccessData", "ClientId")
+ as.CreateIndexIfNotExists("idx_oauthaccessdata_user_id", "OAuthAccessData", "UserId")
+ as.CreateIndexIfNotExists("idx_oauthaccessdata_refresh_token", "OAuthAccessData", "RefreshToken")
+ as.CreateIndexIfNotExists("idx_oauthauthdata_client_id", "OAuthAuthData", "Code")
+}
+
+func (as SqlOAuthStore) SaveApp(app *model.OAuthApp) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
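+ // PreSave assigns the Id, so a non-empty Id means the app has already been saved.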
+ if len(app.Id) > 0 {
+ result.Err = model.NewAppError("SqlOAuthStore.SaveApp", "store.sql_oauth.save_app.existing.app_error", nil, "app_id="+app.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ app.PreSave()
+ if result.Err = app.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := as.GetMaster().Insert(app); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.SaveApp", "store.sql_oauth.save_app.save.app_error", nil, "app_id="+app.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = app
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) UpdateApp(app *model.OAuthApp) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ app.PreUpdate()
+
+ if result.Err = app.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if oldAppResult, err := as.GetMaster().Get(model.OAuthApp{}, app.Id); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.UpdateApp", "store.sql_oauth.update_app.finding.app_error", nil, "app_id="+app.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else if oldAppResult == nil {
+ result.Err = model.NewAppError("SqlOAuthStore.UpdateApp", "store.sql_oauth.update_app.find.app_error", nil, "app_id="+app.Id, http.StatusBadRequest)
+ } else {
+ oldApp := oldAppResult.(*model.OAuthApp)
+ app.CreateAt = oldApp.CreateAt
+ app.CreatorId = oldApp.CreatorId
+
+ if count, err := as.GetMaster().Update(app); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.UpdateApp", "store.sql_oauth.update_app.updating.app_error", nil, "app_id="+app.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else if count != 1 {
+ result.Err = model.NewAppError("SqlOAuthStore.UpdateApp", "store.sql_oauth.update_app.update.app_error", nil, "app_id="+app.Id, http.StatusBadRequest)
+ } else {
+ result.Data = [2]*model.OAuthApp{app, oldApp}
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetApp(id string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := as.GetReplica().Get(model.OAuthApp{}, id); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetApp", "store.sql_oauth.get_app.finding.app_error", nil, "app_id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetApp", "store.sql_oauth.get_app.find.app_error", nil, "app_id="+id, http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.OAuthApp)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAppByUser(userId string, offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var apps []*model.OAuthApp
+
+ if _, err := as.GetReplica().Select(&apps, "SELECT * FROM OAuthApps WHERE CreatorId = :UserId LIMIT :Limit OFFSET :Offset", map[string]interface{}{"UserId": userId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAppByUser", "store.sql_oauth.get_app_by_user.find.app_error", nil, "user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = apps
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetApps(offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var apps []*model.OAuthApp
+
+ if _, err := as.GetReplica().Select(&apps, "SELECT * FROM OAuthApps LIMIT :Limit OFFSET :Offset", map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAppByUser", "store.sql_oauth.get_apps.find.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = apps
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAuthorizedApps(userId string, offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var apps []*model.OAuthApp
+
+ if _, err := as.GetReplica().Select(&apps,
+ `SELECT o.* FROM OAuthApps AS o INNER JOIN
+ Preferences AS p ON p.Name=o.Id AND p.UserId=:UserId LIMIT :Limit OFFSET :Offset`, map[string]interface{}{"UserId": userId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAuthorizedApps", "store.sql_oauth.get_apps.find.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = apps
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) DeleteApp(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ // wrap in a transaction so that if one fails, everything fails
+ transaction, err := as.GetMaster().Begin()
+ if err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete.open_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ if extrasResult := as.deleteApp(transaction, id); extrasResult.Err != nil {
+ result = extrasResult
+ }
+
+ if result.Err == nil {
+ if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete.commit_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ if err := transaction.Rollback(); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete.rollback_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) SaveAccessData(accessData *model.AccessData) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if result.Err = accessData.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := as.GetMaster().Insert(accessData); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.SaveAccessData", "store.sql_oauth.save_access_data.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAccessData(token string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ accessData := model.AccessData{}
+
+ if err := as.GetReplica().SelectOne(&accessData, "SELECT * FROM OAuthAccessData WHERE Token = :Token", map[string]interface{}{"Token": token}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAccessData", "store.sql_oauth.get_access_data.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = &accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAccessDataByUserForApp(userId, clientId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var accessData []*model.AccessData
+
+ if _, err := as.GetReplica().Select(&accessData,
+ "SELECT * FROM OAuthAccessData WHERE UserId = :UserId AND ClientId = :ClientId",
+ map[string]interface{}{"UserId": userId, "ClientId": clientId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAccessDataByUserForApp", "store.sql_oauth.get_access_data_by_user_for_app.app_error", nil, "user_id="+userId+" client_id="+clientId, http.StatusInternalServerError)
+ } else {
+ result.Data = accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAccessDataByRefreshToken(token string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ accessData := model.AccessData{}
+
+ if err := as.GetReplica().SelectOne(&accessData, "SELECT * FROM OAuthAccessData WHERE RefreshToken = :Token", map[string]interface{}{"Token": token}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAccessData", "store.sql_oauth.get_access_data.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = &accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetPreviousAccessData(userId, clientId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ accessData := model.AccessData{}
+
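+ // A missing previous access record is not an error; Data is simply left nil.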
+ if err := as.GetReplica().SelectOne(&accessData, "SELECT * FROM OAuthAccessData WHERE ClientId = :ClientId AND UserId = :UserId",
+ map[string]interface{}{"ClientId": clientId, "UserId": userId}); err != nil {
+ if strings.Contains(err.Error(), "no rows") {
+ result.Data = nil
+ } else {
+ result.Err = model.NewAppError("SqlOAuthStore.GetPreviousAccessData", "store.sql_oauth.get_previous_access_data.app_error", nil, err.Error(), http.StatusNotFound)
+ }
+ } else {
+ result.Data = &accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) UpdateAccessData(accessData *model.AccessData) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if result.Err = accessData.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := as.GetMaster().Exec("UPDATE OAuthAccessData SET Token = :Token, ExpiresAt = :ExpiresAt, RefreshToken = :RefreshToken WHERE ClientId = :ClientId AND UserID = :UserId",
+ map[string]interface{}{"Token": accessData.Token, "ExpiresAt": accessData.ExpiresAt, "RefreshToken": accessData.RefreshToken, "ClientId": accessData.ClientId, "UserId": accessData.UserId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.Update", "store.sql_oauth.update_access_data.app_error", nil,
+ "clientId="+accessData.ClientId+",userId="+accessData.UserId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = accessData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) RemoveAccessData(token string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := as.GetMaster().Exec("DELETE FROM OAuthAccessData WHERE Token = :Token", map[string]interface{}{"Token": token}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.RemoveAccessData", "store.sql_oauth.remove_access_data.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) SaveAuthData(authData *model.AuthData) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ authData.PreSave()
+ if result.Err = authData.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := as.GetMaster().Insert(authData); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.SaveAuthData", "store.sql_oauth.save_auth_data.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = authData
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) GetAuthData(code string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := as.GetReplica().Get(model.AuthData{}, code); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAuthData", "store.sql_oauth.get_auth_data.finding.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlOAuthStore.GetAuthData", "store.sql_oauth.get_auth_data.find.app_error", nil, "", http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.AuthData)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) RemoveAuthData(code string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := as.GetMaster().Exec("DELETE FROM OAuthAuthData WHERE Code = :Code", map[string]interface{}{"Code": code})
+ if err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.RemoveAuthData", "store.sql_oauth.remove_auth_data.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (as SqlOAuthStore) PermanentDeleteAuthDataByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := as.GetMaster().Exec("DELETE FROM OAuthAccessData WHERE UserId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.RemoveAuthDataByUserId", "store.sql_oauth.permanent_delete_auth_data_by_user.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
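+// deleteApp removes the OAuth app inside the given transaction and then cascades to the app's
+// sessions, access tokens and authorization preferences via the helpers below.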
+func (as SqlOAuthStore) deleteApp(transaction *gorp.Transaction, clientId string) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Exec("DELETE FROM OAuthApps WHERE Id = :Id", map[string]interface{}{"Id": clientId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete_app.app_error", nil, "id="+clientId+", err="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return as.deleteOAuthAppSessions(transaction, clientId)
+}
+
+func (as SqlOAuthStore) deleteOAuthAppSessions(transaction *gorp.Transaction, clientId string) store.StoreResult {
+ result := store.StoreResult{}
+
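+	// The DELETE-with-JOIN syntax differs between Postgres and MySQL, so the session cleanup statement is built per driver.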
+ query := ""
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "DELETE FROM Sessions s USING OAuthAccessData o WHERE o.Token = s.Token AND o.ClientId = :Id"
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ query = "DELETE s.* FROM Sessions s INNER JOIN OAuthAccessData o ON o.Token = s.Token WHERE o.ClientId = :Id"
+ }
+
+ if _, err := transaction.Exec(query, map[string]interface{}{"Id": clientId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete_app.app_error", nil, "id="+clientId+", err="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return as.deleteOAuthTokens(transaction, clientId)
+}
+
+func (as SqlOAuthStore) deleteOAuthTokens(transaction *gorp.Transaction, clientId string) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Exec("DELETE FROM OAuthAccessData WHERE ClientId = :Id", map[string]interface{}{"Id": clientId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_oauth.delete_app.app_error", nil, "id="+clientId+", err="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return as.deleteAppExtras(transaction, clientId)
+}
+
+func (as SqlOAuthStore) deleteAppExtras(transaction *gorp.Transaction, clientId string) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Exec(
+ `DELETE FROM
+ Preferences
+ WHERE
+ Category = :Category
+ AND Name = :Name`, map[string]interface{}{"Category": model.PREFERENCE_CATEGORY_AUTHORIZED_OAUTH_APP, "Name": clientId}); err != nil {
+ result.Err = model.NewAppError("SqlOAuthStore.DeleteApp", "store.sql_preference.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return result
+}
diff --git a/store/sqlstore/oauth_store_test.go b/store/sqlstore/oauth_store_test.go
new file mode 100644
index 000000000..1e4aacfe5
--- /dev/null
+++ b/store/sqlstore/oauth_store_test.go
@@ -0,0 +1,446 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestOAuthStoreSaveApp(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+
+ // Try to save an app that already has an Id
+ a1.Id = model.NewId()
+ if err := (<-ss.OAuth().SaveApp(&a1)).Err; err == nil {
+		t.Fatal("Should have failed; an OAuth app cannot be saved with an Id, it has to be updated")
+ }
+
+ // Try to save an Invalid App
+ a1.Id = ""
+ if err := (<-ss.OAuth().SaveApp(&a1)).Err; err == nil {
+		t.Fatal("Should have failed; app should be invalid because it doesn't have a name set")
+ }
+
+ // Save the app
+ a1.Id = ""
+ a1.Name = "TestApp" + model.NewId()
+ if err := (<-ss.OAuth().SaveApp(&a1)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthStoreGetApp(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.Name = "TestApp" + model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+ store.Must(ss.OAuth().SaveApp(&a1))
+
+	// Let's try to get an app that does not exist
+	if err := (<-ss.OAuth().GetApp("fake0123456789abcderfgret1")).Err; err == nil {
+		t.Fatal("Should have failed. App does not exist")
+ }
+
+ if err := (<-ss.OAuth().GetApp(a1.Id)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+	// Let's try to get the apps of a user who hasn't created any apps
+ if result := (<-ss.OAuth().GetAppByUser("fake0123456789abcderfgret1", 0, 1000)); result.Err == nil {
+ if len(result.Data.([]*model.OAuthApp)) > 0 {
+ t.Fatal("Should have failed. Fake user hasn't created any apps")
+ }
+ } else {
+ t.Fatal(result.Err)
+ }
+
+ if err := (<-ss.OAuth().GetAppByUser(a1.CreatorId, 0, 1000)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.OAuth().GetApps(0, 1000)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthStoreUpdateApp(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.Name = "TestApp" + model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+ store.Must(ss.OAuth().SaveApp(&a1))
+
+ // temporarily save the created app id
+ id := a1.Id
+
+ a1.CreateAt = 1
+ a1.ClientSecret = "pwd"
+ a1.CreatorId = "12345678901234567890123456"
+
+	// Let's update the app by removing its name
+ a1.Name = ""
+ if result := <-ss.OAuth().UpdateApp(&a1); result.Err == nil {
+ t.Fatal("Should have failed. App name is not set")
+ }
+
+	// Let's try to update an app that can't be found
+ a1.Id = "fake0123456789abcderfgret1"
+ a1.Name = "NewName"
+ if result := <-ss.OAuth().UpdateApp(&a1); result.Err == nil {
+ t.Fatal("Should have failed. Not able to find the app")
+ }
+
+ a1.Id = id
+ if result := <-ss.OAuth().UpdateApp(&a1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ ua1 := (result.Data.([2]*model.OAuthApp)[0])
+ if ua1.Name != "NewName" {
+ t.Fatal("name did not update")
+ }
+ if ua1.CreateAt == 1 {
+ t.Fatal("create at should not have updated")
+ }
+ if ua1.CreatorId == "12345678901234567890123456" {
+ t.Fatal("creator id should not have updated")
+ }
+ }
+}
+
+func TestOAuthStoreSaveAccessData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AccessData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+
+	// Let's try to save incomplete access data
+ if err := (<-ss.OAuth().SaveAccessData(&a1)).Err; err == nil {
+ t.Fatal("Should have failed. Access data needs the token")
+ }
+
+ a1.Token = model.NewId()
+ a1.RefreshToken = model.NewId()
+ a1.RedirectUri = "http://example.com"
+
+ if err := (<-ss.OAuth().SaveAccessData(&a1)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthUpdateAccessData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AccessData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Token = model.NewId()
+ a1.RefreshToken = model.NewId()
+ a1.ExpiresAt = model.GetMillis()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAccessData(&a1))
+
+	// Try to update to an invalid refresh token
+ refreshToken := a1.RefreshToken
+ a1.RefreshToken = model.NewId() + "123"
+ if err := (<-ss.OAuth().UpdateAccessData(&a1)).Err; err == nil {
+ t.Fatal("Should have failed with invalid token")
+ }
+
+	// Try to update to an invalid RedirectUri
+ a1.RefreshToken = model.NewId()
+ a1.RedirectUri = ""
+ if err := (<-ss.OAuth().UpdateAccessData(&a1)).Err; err == nil {
+ t.Fatal("Should have failed with invalid Redirect URI")
+ }
+
+ // Should update fine
+ a1.RedirectUri = "http://example.com"
+ if result := <-ss.OAuth().UpdateAccessData(&a1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ ra1 := result.Data.(*model.AccessData)
+ if ra1.RefreshToken == refreshToken {
+ t.Fatal("refresh tokens didn't match")
+ }
+ }
+}
+
+func TestOAuthStoreGetAccessData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AccessData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Token = model.NewId()
+ a1.RefreshToken = model.NewId()
+ a1.ExpiresAt = model.GetMillis()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAccessData(&a1))
+
+ if err := (<-ss.OAuth().GetAccessData("invalidToken")).Err; err == nil {
+ t.Fatal("Should have failed. There is no data with an invalid token")
+ }
+
+ if result := <-ss.OAuth().GetAccessData(a1.Token); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ ra1 := result.Data.(*model.AccessData)
+ if a1.Token != ra1.Token {
+ t.Fatal("tokens didn't match")
+ }
+ }
+
+ if err := (<-ss.OAuth().GetPreviousAccessData(a1.UserId, a1.ClientId)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.OAuth().GetPreviousAccessData("user", "junk")).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ // Try to get the Access data using an invalid refresh token
+ if err := (<-ss.OAuth().GetAccessDataByRefreshToken(a1.Token)).Err; err == nil {
+ t.Fatal("Should have failed. There is no data with an invalid token")
+ }
+
+ // Get the Access Data using the refresh token
+ if result := <-ss.OAuth().GetAccessDataByRefreshToken(a1.RefreshToken); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ ra1 := result.Data.(*model.AccessData)
+ if a1.RefreshToken != ra1.RefreshToken {
+ t.Fatal("tokens didn't match")
+ }
+ }
+}
+
+func TestOAuthStoreRemoveAccessData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AccessData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Token = model.NewId()
+ a1.RefreshToken = model.NewId()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAccessData(&a1))
+
+ if err := (<-ss.OAuth().RemoveAccessData(a1.Token)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+	if result := <-ss.OAuth().GetPreviousAccessData(a1.UserId, a1.ClientId); result.Err == nil && result.Data != nil {
+		t.Fatal("did not delete access token")
+	}
+}
+
+func TestOAuthStoreSaveAuthData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AuthData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Code = model.NewId()
+ a1.RedirectUri = "http://example.com"
+ if err := (<-ss.OAuth().SaveAuthData(&a1)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthStoreGetAuthData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AuthData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Code = model.NewId()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAuthData(&a1))
+
+ if err := (<-ss.OAuth().GetAuthData(a1.Code)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthStoreRemoveAuthData(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AuthData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Code = model.NewId()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAuthData(&a1))
+
+ if err := (<-ss.OAuth().RemoveAuthData(a1.Code)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.OAuth().GetAuthData(a1.Code)).Err; err == nil {
+ t.Fatal("should have errored - auth code removed")
+ }
+}
+
+func TestOAuthStoreRemoveAuthDataByUser(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.AuthData{}
+ a1.ClientId = model.NewId()
+ a1.UserId = model.NewId()
+ a1.Code = model.NewId()
+ a1.RedirectUri = "http://example.com"
+ store.Must(ss.OAuth().SaveAuthData(&a1))
+
+ if err := (<-ss.OAuth().PermanentDeleteAuthDataByUser(a1.UserId)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestOAuthGetAuthorizedApps(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.Name = "TestApp" + model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+ store.Must(ss.OAuth().SaveApp(&a1))
+
+	// Let's try to get the authorized apps for a user who hasn't authorized any
+ if result := <-ss.OAuth().GetAuthorizedApps("fake0123456789abcderfgret1", 0, 1000); result.Err == nil {
+ if len(result.Data.([]*model.OAuthApp)) > 0 {
+ t.Fatal("Should have failed. Fake user hasn't authorized the app")
+ }
+ } else {
+ t.Fatal(result.Err)
+ }
+
+ // allow the app
+ p := model.Preference{}
+ p.UserId = a1.CreatorId
+ p.Category = model.PREFERENCE_CATEGORY_AUTHORIZED_OAUTH_APP
+ p.Name = a1.Id
+ p.Value = "true"
+ store.Must(ss.Preference().Save(&model.Preferences{p}))
+
+ if result := <-ss.OAuth().GetAuthorizedApps(a1.CreatorId, 0, 1000); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ apps := result.Data.([]*model.OAuthApp)
+ if len(apps) == 0 {
+			t.Fatal("It should have returned apps")
+ }
+ }
+}
+
+func TestOAuthGetAccessDataByUserForApp(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.Name = "TestApp" + model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+ store.Must(ss.OAuth().SaveApp(&a1))
+
+ // allow the app
+ p := model.Preference{}
+ p.UserId = a1.CreatorId
+ p.Category = model.PREFERENCE_CATEGORY_AUTHORIZED_OAUTH_APP
+ p.Name = a1.Id
+ p.Value = "true"
+ store.Must(ss.Preference().Save(&model.Preferences{p}))
+
+ if result := <-ss.OAuth().GetAuthorizedApps(a1.CreatorId, 0, 1000); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ apps := result.Data.([]*model.OAuthApp)
+ if len(apps) == 0 {
+			t.Fatal("It should have returned apps")
+ }
+ }
+
+ // save the token
+ ad1 := model.AccessData{}
+ ad1.ClientId = a1.Id
+ ad1.UserId = a1.CreatorId
+ ad1.Token = model.NewId()
+ ad1.RefreshToken = model.NewId()
+ ad1.RedirectUri = "http://example.com"
+
+ if err := (<-ss.OAuth().SaveAccessData(&ad1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if result := <-ss.OAuth().GetAccessDataByUserForApp(a1.CreatorId, a1.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ accessData := result.Data.([]*model.AccessData)
+ if len(accessData) == 0 {
+			t.Fatal("It should have returned access data")
+ }
+ }
+}
+
+func TestOAuthStoreDeleteApp(t *testing.T) {
+ ss := Setup()
+
+ a1 := model.OAuthApp{}
+ a1.CreatorId = model.NewId()
+ a1.Name = "TestApp" + model.NewId()
+ a1.CallbackUrls = []string{"https://nowhere.com"}
+ a1.Homepage = "https://nowhere.com"
+ store.Must(ss.OAuth().SaveApp(&a1))
+
+ // delete a non-existent app
+ if err := (<-ss.OAuth().DeleteApp("fakeclientId")).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ s1.Token = model.NewId()
+ s1.IsOAuth = true
+
+ store.Must(ss.Session().Save(&s1))
+
+ ad1 := model.AccessData{}
+ ad1.ClientId = a1.Id
+ ad1.UserId = a1.CreatorId
+ ad1.Token = s1.Token
+ ad1.RefreshToken = model.NewId()
+ ad1.RedirectUri = "http://example.com"
+
+ store.Must(ss.OAuth().SaveAccessData(&ad1))
+
+ if err := (<-ss.OAuth().DeleteApp(a1.Id)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.Session().Get(s1.Token)).Err; err == nil {
+ t.Fatal("should error - session should be deleted")
+ }
+
+ if err := (<-ss.OAuth().GetAccessData(s1.Token)).Err; err == nil {
+ t.Fatal("should error - access data should be deleted")
+ }
+}
diff --git a/store/sqlstore/post_store.go b/store/sqlstore/post_store.go
new file mode 100644
index 000000000..fb82dd724
--- /dev/null
+++ b/store/sqlstore/post_store.go
@@ -0,0 +1,1393 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "fmt"
+ "net/http"
+ "regexp"
+ "strconv"
+ "strings"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlPostStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
+const (
+ LAST_POST_TIME_CACHE_SIZE = 25000
+ LAST_POST_TIME_CACHE_SEC = 900 // 15 minutes
+
+ LAST_POSTS_CACHE_SIZE = 1000
+ LAST_POSTS_CACHE_SEC = 900 // 15 minutes
+)
+
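+// Package-level LRU caches used by the post store: the most recent post time per channel
+// (used for etags and GetPostsSince) and the most recent page of posts per channel.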
+var lastPostTimeCache = utils.NewLru(LAST_POST_TIME_CACHE_SIZE)
+var lastPostsCache = utils.NewLru(LAST_POSTS_CACHE_SIZE)
+
+func ClearPostCaches() {
+ lastPostTimeCache.Purge()
+ lastPostsCache.Purge()
+}
+
+func NewSqlPostStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.PostStore {
+ s := &SqlPostStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Post{}, "Posts").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("ChannelId").SetMaxSize(26)
+ table.ColMap("RootId").SetMaxSize(26)
+ table.ColMap("ParentId").SetMaxSize(26)
+ table.ColMap("OriginalId").SetMaxSize(26)
+ table.ColMap("Message").SetMaxSize(4000)
+ table.ColMap("Type").SetMaxSize(26)
+ table.ColMap("Hashtags").SetMaxSize(1000)
+ table.ColMap("Props").SetMaxSize(8000)
+ table.ColMap("Filenames").SetMaxSize(4000)
+ table.ColMap("FileIds").SetMaxSize(150)
+ }
+
+ return s
+}
+
+func (s SqlPostStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_posts_update_at", "Posts", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_posts_create_at", "Posts", "CreateAt")
+ s.CreateIndexIfNotExists("idx_posts_delete_at", "Posts", "DeleteAt")
+ s.CreateIndexIfNotExists("idx_posts_channel_id", "Posts", "ChannelId")
+ s.CreateIndexIfNotExists("idx_posts_root_id", "Posts", "RootId")
+ s.CreateIndexIfNotExists("idx_posts_user_id", "Posts", "UserId")
+ s.CreateIndexIfNotExists("idx_posts_is_pinned", "Posts", "IsPinned")
+
+ s.CreateFullTextIndexIfNotExists("idx_posts_message_txt", "Posts", "Message")
+ s.CreateFullTextIndexIfNotExists("idx_posts_hashtags_txt", "Posts", "Hashtags")
+}
+
+func (s SqlPostStore) Save(post *model.Post) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(post.Id) > 0 {
+ result.Err = model.NewAppError("SqlPostStore.Save", "store.sql_post.save.existing.app_error", nil, "id="+post.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ post.PreSave()
+ if result.Err = post.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(post); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Save", "store.sql_post.save.app_error", nil, "id="+post.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ time := post.UpdateAt
+
+ if post.Type != model.POST_JOIN_LEAVE && post.Type != model.POST_JOIN_CHANNEL && post.Type != model.POST_LEAVE_CHANNEL &&
+ post.Type != model.POST_ADD_REMOVE && post.Type != model.POST_ADD_TO_CHANNEL && post.Type != model.POST_REMOVE_FROM_CHANNEL {
+ s.GetMaster().Exec("UPDATE Channels SET LastPostAt = :LastPostAt, TotalMsgCount = TotalMsgCount + 1 WHERE Id = :ChannelId", map[string]interface{}{"LastPostAt": time, "ChannelId": post.ChannelId})
+ } else {
+ // don't update TotalMsgCount for unimportant messages so that the channel isn't marked as unread
+ s.GetMaster().Exec("UPDATE Channels SET LastPostAt = :LastPostAt WHERE Id = :ChannelId", map[string]interface{}{"LastPostAt": time, "ChannelId": post.ChannelId})
+ }
+
+ if len(post.RootId) > 0 {
+ s.GetMaster().Exec("UPDATE Posts SET UpdateAt = :UpdateAt WHERE Id = :RootId", map[string]interface{}{"UpdateAt": time, "RootId": post.RootId})
+ }
+
+ result.Data = post
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) Update(newPost *model.Post, oldPost *model.Post) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ newPost.UpdateAt = model.GetMillis()
+ newPost.PreCommit()
+
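+		// Keep the previous revision as an archived copy: give it a new Id, record the original
+		// Id in OriginalId, and mark it deleted at the update time.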
+ oldPost.DeleteAt = newPost.UpdateAt
+ oldPost.UpdateAt = newPost.UpdateAt
+ oldPost.OriginalId = oldPost.Id
+ oldPost.Id = model.NewId()
+ oldPost.PreCommit()
+
+ if result.Err = newPost.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := s.GetMaster().Update(newPost); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Update", "store.sql_post.update.app_error", nil, "id="+newPost.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ time := model.GetMillis()
+ s.GetMaster().Exec("UPDATE Channels SET LastPostAt = :LastPostAt WHERE Id = :ChannelId", map[string]interface{}{"LastPostAt": time, "ChannelId": newPost.ChannelId})
+
+ if len(newPost.RootId) > 0 {
+ s.GetMaster().Exec("UPDATE Posts SET UpdateAt = :UpdateAt WHERE Id = :RootId", map[string]interface{}{"UpdateAt": time, "RootId": newPost.RootId})
+ }
+
+ // mark the old post as deleted
+ s.GetMaster().Insert(oldPost)
+
+ result.Data = newPost
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) Overwrite(post *model.Post) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ post.UpdateAt = model.GetMillis()
+
+ if result.Err = post.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := s.GetMaster().Update(post); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Overwrite", "store.sql_post.overwrite.app_error", nil, "id="+post.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = post
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetFlaggedPosts(userId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+ go func() {
+ result := store.StoreResult{}
+ pl := model.NewPostList()
+
+ var posts []*model.Post
+ if _, err := s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE Id IN (SELECT Name FROM Preferences WHERE UserId = :UserId AND Category = :Category) AND DeleteAt = 0 ORDER BY CreateAt DESC LIMIT :Limit OFFSET :Offset", map[string]interface{}{"UserId": userId, "Category": model.PREFERENCE_CATEGORY_FLAGGED_POST, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetFlaggedPosts", "store.sql_post.get_flagged_posts.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, post := range posts {
+ pl.AddPost(post)
+ pl.AddOrder(post.Id)
+ }
+ }
+
+ result.Data = pl
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetFlaggedPostsForTeam(userId, teamId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+ go func() {
+ result := store.StoreResult{}
+ pl := model.NewPostList()
+
+ var posts []*model.Post
+
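+		// Select the user's flagged posts, then keep only those in channels belonging to the team
+		// (or in direct/group channels, which have an empty TeamId).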
+ query := `
+ SELECT
+ A.*
+ FROM
+ (SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ Id
+ IN
+ (SELECT
+ Name
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category)
+ AND DeleteAt = 0
+ ) as A
+ INNER JOIN Channels as B
+ ON B.Id = A.ChannelId
+ WHERE B.TeamId = :TeamId OR B.TeamId = ''
+ ORDER BY CreateAt DESC
+ LIMIT :Limit OFFSET :Offset`
+
+ if _, err := s.GetReplica().Select(&posts, query, map[string]interface{}{"UserId": userId, "Category": model.PREFERENCE_CATEGORY_FLAGGED_POST, "Offset": offset, "Limit": limit, "TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetFlaggedPostsForTeam", "store.sql_post.get_flagged_posts.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, post := range posts {
+ pl.AddPost(post)
+ pl.AddOrder(post.Id)
+ }
+ }
+
+ result.Data = pl
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetFlaggedPostsForChannel(userId, channelId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+ go func() {
+ result := store.StoreResult{}
+ pl := model.NewPostList()
+
+ var posts []*model.Post
+ query := `
+ SELECT
+ *
+ FROM Posts
+ WHERE
+ Id IN (SELECT Name FROM Preferences WHERE UserId = :UserId AND Category = :Category)
+ AND ChannelId = :ChannelId
+ AND DeleteAt = 0
+ ORDER BY CreateAt DESC
+ LIMIT :Limit OFFSET :Offset`
+
+ if _, err := s.GetReplica().Select(&posts, query, map[string]interface{}{"UserId": userId, "Category": model.PREFERENCE_CATEGORY_FLAGGED_POST, "ChannelId": channelId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetFlaggedPostsForChannel", "store.sql_post.get_flagged_posts.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, post := range posts {
+ pl.AddPost(post)
+ pl.AddOrder(post.Id)
+ }
+ }
+
+ result.Data = pl
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ pl := model.NewPostList()
+
+ if len(id) == 0 {
+ result.Err = model.NewAppError("SqlPostStore.GetPost", "store.sql_post.get.app_error", nil, "id="+id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ var post model.Post
+ err := s.GetReplica().SelectOne(&post, "SELECT * FROM Posts WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPost", "store.sql_post.get.app_error", nil, "id="+id+err.Error(), http.StatusNotFound)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ pl.AddPost(&post)
+ pl.AddOrder(id)
+
+ rootId := post.RootId
+
+ if rootId == "" {
+ rootId = post.Id
+ }
+
+ if len(rootId) == 0 {
+ result.Err = model.NewAppError("SqlPostStore.GetPost", "store.sql_post.get.app_error", nil, "root_id="+rootId, http.StatusInternalServerError)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ var posts []*model.Post
+ _, err = s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE (Id = :Id OR RootId = :RootId) AND DeleteAt = 0", map[string]interface{}{"Id": rootId, "RootId": rootId})
+		if err != nil {
+			result.Err = model.NewAppError("SqlPostStore.GetPost", "store.sql_post.get.app_error", nil, "root_id="+rootId+err.Error(), http.StatusInternalServerError)
+			storeChannel <- result
+			close(storeChannel)
+			return
+		}
+
+		for _, p := range posts {
+			pl.AddPost(p)
+		}
+
+ result.Data = pl
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetSingle(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var post model.Post
+ err := s.GetReplica().SelectOne(&post, "SELECT * FROM Posts WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetSingle", "store.sql_post.get.app_error", nil, "id="+id+err.Error(), http.StatusNotFound)
+ }
+
+ result.Data = &post
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+type etagPosts struct {
+ Id string
+ UpdateAt int64
+}
+
+func (s SqlPostStore) InvalidateLastPostTimeCache(channelId string) {
+ lastPostTimeCache.Remove(channelId)
+ lastPostsCache.Remove(channelId)
+}
+
+func (s SqlPostStore) GetEtag(channelId string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := lastPostTimeCache.Get(channelId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Last Post Time")
+ }
+ result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, cacheItem.(int64))
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Post Time")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Post Time")
+ }
+ }
+
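+		// Cache miss: fall back to the newest UpdateAt in the channel and cache it for later etag requests.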
+ var et etagPosts
+ err := s.GetReplica().SelectOne(&et, "SELECT Id, UpdateAt FROM Posts WHERE ChannelId = :ChannelId ORDER BY UpdateAt DESC LIMIT 1", map[string]interface{}{"ChannelId": channelId})
+ if err != nil {
+ result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, model.GetMillis())
+ } else {
+ result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, et.UpdateAt)
+ }
+
+ lastPostTimeCache.AddWithExpiresInSecs(channelId, et.UpdateAt, LAST_POST_TIME_CACHE_SEC)
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) Delete(postId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+		_, err := s.GetMaster().Exec("UPDATE Posts SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :Id OR RootId = :RootId", map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": postId, "RootId": postId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Delete", "store.sql_post.delete.app_error", nil, "id="+postId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) permanentDelete(postId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM Posts WHERE Id = :Id OR RootId = :RootId", map[string]interface{}{"Id": postId, "RootId": postId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Delete", "store.sql_post.permanent_delete.app_error", nil, "id="+postId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) permanentDeleteAllCommentByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM Posts WHERE UserId = :UserId AND RootId != ''", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.permanentDeleteAllCommentByUser", "store.sql_post.permanent_delete_all_comments_by_user.app_error", nil, "userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) PermanentDeleteByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ // First attempt to delete all the comments for a user
+ if r := <-s.permanentDeleteAllCommentByUser(userId); r.Err != nil {
+ result.Err = r.Err
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ // Now attempt to delete all the root posts for a user. This will also
+ // delete all the comments for each post.
+ found := true
+ count := 0
+
+ for found {
+ var ids []string
+ _, err := s.GetMaster().Select(&ids, "SELECT Id FROM Posts WHERE UserId = :UserId LIMIT 1000", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.PermanentDeleteByUser.select", "store.sql_post.permanent_delete_by_user.app_error", nil, "userId="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ found = false
+ for _, id := range ids {
+ found = true
+ if r := <-s.permanentDelete(id); r.Err != nil {
+ result.Err = r.Err
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+ }
+ }
+
+			// This is a fail-safe: give up after 10 batches, i.e. more than 10K root posts
+ count = count + 1
+ if count >= 10 {
+ result.Err = model.NewAppError("SqlPostStore.PermanentDeleteByUser.toolarge", "store.sql_post.permanent_delete_by_user.too_many.app_error", nil, "userId="+userId, http.StatusInternalServerError)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) PermanentDeleteByChannel(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Posts WHERE ChannelId = :ChannelId", map[string]interface{}{"ChannelId": channelId}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.PermanentDeleteByChannel", "store.sql_post.permanent_delete_by_channel.app_error", nil, "channel_id="+channelId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if limit > 1000 {
+ result.Err = model.NewAppError("SqlPostStore.GetLinearPosts", "store.sql_post.get_posts.app_error", nil, "channelId="+channelId, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if allowFromCache && offset == 0 && limit == 60 {
+ if cacheItem, ok := lastPostsCache.Get(channelId); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Last Posts Cache")
+ }
+
+ result.Data = cacheItem.(*model.PostList)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Posts Cache")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Posts Cache")
+ }
+ }
+
+ rpc := s.getRootPosts(channelId, offset, limit)
+ cpc := s.getParentsPosts(channelId, offset, limit)
+
+ if rpr := <-rpc; rpr.Err != nil {
+ result.Err = rpr.Err
+ } else if cpr := <-cpc; cpr.Err != nil {
+ result.Err = cpr.Err
+ } else {
+ posts := rpr.Data.([]*model.Post)
+ parents := cpr.Data.([]*model.Post)
+
+ list := model.NewPostList()
+
+ for _, p := range posts {
+ list.AddPost(p)
+ list.AddOrder(p.Id)
+ }
+
+ for _, p := range parents {
+ list.AddPost(p)
+ }
+
+ list.MakeNonNil()
+
+ if offset == 0 && limit == 60 {
+ lastPostsCache.AddWithExpiresInSecs(channelId, list, LAST_POSTS_CACHE_SEC)
+ }
+
+ result.Data = list
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPostsSince(channelId string, time int64, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+			// If the time of the last post in the channel is less than or equal to the time we are
+			// getting posts since, we can safely return no posts.
+ if cacheItem, ok := lastPostTimeCache.Get(channelId); ok && cacheItem.(int64) <= time {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Last Post Time")
+ }
+ list := model.NewPostList()
+ result.Data = list
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Post Time")
+ }
+ }
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Last Post Time")
+ }
+ }
+
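+		// Fetch posts updated since the given time, plus the root posts of any threads they belong to,
+		// so that complete threads can be rendered.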
+ var posts []*model.Post
+ _, err := s.GetReplica().Select(&posts,
+ `(SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ (UpdateAt > :Time
+ AND ChannelId = :ChannelId)
+ LIMIT 1000)
+ UNION
+ (SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ Id
+ IN
+ (SELECT * FROM (SELECT
+ RootId
+ FROM
+ Posts
+ WHERE
+ UpdateAt > :Time
+ AND ChannelId = :ChannelId
+ LIMIT 1000) temp_tab))
+ ORDER BY CreateAt DESC`,
+ map[string]interface{}{"ChannelId": channelId, "Time": time})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPostsSince", "store.sql_post.get_posts_since.app_error", nil, "channelId="+channelId+err.Error(), http.StatusInternalServerError)
+ } else {
+
+ list := model.NewPostList()
+
+ var latestUpdate int64 = 0
+
+ for _, p := range posts {
+ list.AddPost(p)
+ if p.UpdateAt > time {
+ list.AddOrder(p.Id)
+ }
+ if latestUpdate < p.UpdateAt {
+ latestUpdate = p.UpdateAt
+ }
+ }
+
+ lastPostTimeCache.AddWithExpiresInSecs(channelId, latestUpdate, LAST_POST_TIME_CACHE_SEC)
+
+ result.Data = list
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPostsBefore(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
+ return s.getPostsAround(channelId, postId, numPosts, offset, true)
+}
+
+func (s SqlPostStore) GetPostsAfter(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
+ return s.getPostsAround(channelId, postId, numPosts, offset, false)
+}
+
+func (s SqlPostStore) getPostsAround(channelId string, postId string, numPosts int, offset int, before bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var direction string
+ var sort string
+ if before {
+ direction = "<"
+ sort = "DESC"
+ } else {
+ direction = ">"
+ sort = "ASC"
+ }
+
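+		// Fetch the window of posts on the requested side of the anchor post, along with the root
+		// posts needed to complete their threads.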
+ var posts []*model.Post
+ var parents []*model.Post
+ _, err1 := s.GetReplica().Select(&posts,
+ `(SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ (CreateAt `+direction+` (SELECT CreateAt FROM Posts WHERE Id = :PostId)
+ AND ChannelId = :ChannelId
+ AND DeleteAt = 0)
+ ORDER BY CreateAt `+sort+`
+ LIMIT :NumPosts
+ OFFSET :Offset)`,
+ map[string]interface{}{"ChannelId": channelId, "PostId": postId, "NumPosts": numPosts, "Offset": offset})
+ _, err2 := s.GetReplica().Select(&parents,
+ `(SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ Id
+ IN
+ (SELECT * FROM (SELECT
+ RootId
+ FROM
+ Posts
+ WHERE
+ (CreateAt `+direction+` (SELECT CreateAt FROM Posts WHERE Id = :PostId)
+ AND ChannelId = :ChannelId
+ AND DeleteAt = 0)
+ ORDER BY CreateAt `+sort+`
+ LIMIT :NumPosts
+ OFFSET :Offset)
+ temp_tab))
+ ORDER BY CreateAt DESC`,
+ map[string]interface{}{"ChannelId": channelId, "PostId": postId, "NumPosts": numPosts, "Offset": offset})
+
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPostContext", "store.sql_post.get_posts_around.get.app_error", nil, "channelId="+channelId+err1.Error(), http.StatusInternalServerError)
+ } else if err2 != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPostContext", "store.sql_post.get_posts_around.get_parent.app_error", nil, "channelId="+channelId+err2.Error(), http.StatusInternalServerError)
+ } else {
+
+ list := model.NewPostList()
+
+			// Posts are returned newest first, so flip the order when we selected ascending (after the anchor post)
+ if before {
+ for _, p := range posts {
+ list.AddPost(p)
+ list.AddOrder(p.Id)
+ }
+ } else {
+ l := len(posts)
+ for i := range posts {
+ list.AddPost(posts[l-i-1])
+ list.AddOrder(posts[l-i-1].Id)
+ }
+ }
+
+ for _, p := range parents {
+ list.AddPost(p)
+ }
+
+ result.Data = list
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) getRootPosts(channelId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var posts []*model.Post
+ _, err := s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE ChannelId = :ChannelId AND DeleteAt = 0 ORDER BY CreateAt DESC LIMIT :Limit OFFSET :Offset", map[string]interface{}{"ChannelId": channelId, "Offset": offset, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetLinearPosts", "store.sql_post.get_root_posts.app_error", nil, "channelId="+channelId+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) getParentsPosts(channelId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var posts []*model.Post
+ _, err := s.GetReplica().Select(&posts,
+ `SELECT
+ q2.*
+ FROM
+ Posts q2
+ INNER JOIN
+ (SELECT DISTINCT
+ q3.RootId
+ FROM
+ (SELECT
+ RootId
+ FROM
+ Posts
+ WHERE
+ ChannelId = :ChannelId1
+ AND DeleteAt = 0
+ ORDER BY CreateAt DESC
+ LIMIT :Limit OFFSET :Offset) q3
+ WHERE q3.RootId != '') q1
+ ON q1.RootId = q2.Id OR q1.RootId = q2.RootId
+ WHERE
+ ChannelId = :ChannelId2
+ AND DeleteAt = 0
+ ORDER BY CreateAt`,
+ map[string]interface{}{"ChannelId1": channelId, "Offset": offset, "Limit": limit, "ChannelId2": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetLinearPosts", "store.sql_post.get_parents_posts.app_error", nil, "channelId="+channelId+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+var specialSearchChar = []string{
+ "<",
+ ">",
+ "+",
+ "-",
+ "(",
+ ")",
+ "~",
+ "@",
+ ":",
+}
+
+func (s SqlPostStore) Search(teamId string, userId string, params *model.SearchParams) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if !*utils.Cfg.ServiceSettings.EnablePostSearch {
+ list := model.NewPostList()
+ result.Data = list
+
+ result.Err = model.NewAppError("SqlPostStore.Search", "store.sql_post.search.disabled", nil, fmt.Sprintf("teamId=%v userId=%v params=%v", teamId, userId, params.ToJson()), http.StatusNotImplemented)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ queryParams := map[string]interface{}{
+ "TeamId": teamId,
+ "UserId": userId,
+ }
+
+ termMap := map[string]bool{}
+ terms := params.Terms
+
+ if terms == "" && len(params.InChannels) == 0 && len(params.FromUsers) == 0 {
+ result.Data = []*model.Post{}
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ searchType := "Message"
+ if params.IsHashtag {
+ searchType = "Hashtags"
+ for _, term := range strings.Split(terms, " ") {
+ termMap[strings.ToUpper(term)] = true
+ }
+ }
+
+ // these chars have special meaning and can be treated as spaces
+ for _, c := range specialSearchChar {
+ terms = strings.Replace(terms, c, " ", -1)
+ }
+
+ var posts []*model.Post
+
+ searchQuery := `
+ SELECT
+ *
+ FROM
+ Posts
+ WHERE
+ DeleteAt = 0
+ AND Type NOT LIKE '` + model.POST_SYSTEM_MESSAGE_PREFIX + `%'
+ POST_FILTER
+ AND ChannelId IN (
+ SELECT
+ Id
+ FROM
+ Channels,
+ ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND (TeamId = :TeamId OR TeamId = '')
+ AND UserId = :UserId
+ AND DeleteAt = 0
+ CHANNEL_FILTER)
+ SEARCH_CLAUSE
+ ORDER BY CreateAt DESC
+ LIMIT 100`
+
+ if len(params.InChannels) > 1 {
+ inClause := ":InChannel0"
+ queryParams["InChannel0"] = params.InChannels[0]
+
+ for i := 1; i < len(params.InChannels); i++ {
+ paramName := "InChannel" + strconv.FormatInt(int64(i), 10)
+ inClause += ", :" + paramName
+ queryParams[paramName] = params.InChannels[i]
+ }
+
+ searchQuery = strings.Replace(searchQuery, "CHANNEL_FILTER", "AND Name IN ("+inClause+")", 1)
+ } else if len(params.InChannels) == 1 {
+ queryParams["InChannel"] = params.InChannels[0]
+ searchQuery = strings.Replace(searchQuery, "CHANNEL_FILTER", "AND Name = :InChannel", 1)
+ } else {
+ searchQuery = strings.Replace(searchQuery, "CHANNEL_FILTER", "", 1)
+ }
+
+ if len(params.FromUsers) > 1 {
+ inClause := ":FromUser0"
+ queryParams["FromUser0"] = params.FromUsers[0]
+
+ for i := 1; i < len(params.FromUsers); i++ {
+ paramName := "FromUser" + strconv.FormatInt(int64(i), 10)
+ inClause += ", :" + paramName
+ queryParams[paramName] = params.FromUsers[i]
+ }
+
+ searchQuery = strings.Replace(searchQuery, "POST_FILTER", `
+ AND UserId IN (
+ SELECT
+ Id
+ FROM
+ Users,
+ TeamMembers
+ WHERE
+ TeamMembers.TeamId = :TeamId
+ AND Users.Id = TeamMembers.UserId
+ AND Username IN (`+inClause+`))`, 1)
+ } else if len(params.FromUsers) == 1 {
+ queryParams["FromUser"] = params.FromUsers[0]
+ searchQuery = strings.Replace(searchQuery, "POST_FILTER", `
+ AND UserId IN (
+ SELECT
+ Id
+ FROM
+ Users,
+ TeamMembers
+ WHERE
+ TeamMembers.TeamId = :TeamId
+ AND Users.Id = TeamMembers.UserId
+ AND Username = :FromUser)`, 1)
+ } else {
+ searchQuery = strings.Replace(searchQuery, "POST_FILTER", "", 1)
+ }
+
+ if terms == "" {
+ // we've already confirmed that we have a channel or user to search for
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "", 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ // Parse text for wildcards
+ if wildcard, err := regexp.Compile("\\*($| )"); err == nil {
+ terms = wildcard.ReplaceAllLiteralString(terms, ":* ")
+ }
+
+ if params.OrTerms {
+ terms = strings.Join(strings.Fields(terms), " | ")
+ } else {
+ terms = strings.Join(strings.Fields(terms), " & ")
+ }
+
+ searchClause := fmt.Sprintf("AND %s @@ to_tsquery(:Terms)", searchType)
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ searchClause := fmt.Sprintf("AND MATCH (%s) AGAINST (:Terms IN BOOLEAN MODE)", searchType)
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+
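+			// MySQL boolean mode treats bare terms as optional, so prefix each term with '+' to require all of them (AND semantics).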
+ if !params.OrTerms {
+ splitTerms := strings.Fields(terms)
+ for i, t := range strings.Fields(terms) {
+ splitTerms[i] = "+" + t
+ }
+
+ terms = strings.Join(splitTerms, " ")
+ }
+ }
+
+ queryParams["Terms"] = terms
+
+ list := model.NewPostList()
+
+ _, err := s.GetSearchReplica().Select(&posts, searchQuery, queryParams)
+ if err != nil {
+ l4g.Warn(utils.T("store.sql_post.search.warn"), err.Error())
+ // Don't return the error to the caller as it is of no use to the user. Instead return an empty set of search results.
+ } else {
+ for _, p := range posts {
+ if searchType == "Hashtags" {
+ exactMatch := false
+ for _, tag := range strings.Split(p.Hashtags, " ") {
+ if termMap[strings.ToUpper(tag)] {
+ exactMatch = true
+ }
+ }
+ if !exactMatch {
+ continue
+ }
+ }
+ list.AddPost(p)
+ list.AddOrder(p.Id)
+ }
+ }
+
+ list.MakeNonNil()
+
+ result.Data = list
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) AnalyticsUserCountsWithPostsByDay(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
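+		// The default query uses MySQL date functions; it is swapped for the Postgres equivalent below when that driver is configured.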
+ query :=
+ `SELECT DISTINCT
+ DATE(FROM_UNIXTIME(Posts.CreateAt / 1000)) AS Name,
+ COUNT(DISTINCT Posts.UserId) AS Value
+ FROM Posts`
+
+ if len(teamId) > 0 {
+ query += " INNER JOIN Channels ON Posts.ChannelId = Channels.Id AND Channels.TeamId = :TeamId AND"
+ } else {
+ query += " WHERE"
+ }
+
+ query += ` Posts.CreateAt >= :StartTime AND Posts.CreateAt <= :EndTime
+ GROUP BY DATE(FROM_UNIXTIME(Posts.CreateAt / 1000))
+ ORDER BY Name DESC
+ LIMIT 30`
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query =
+ `SELECT
+ TO_CHAR(DATE(TO_TIMESTAMP(Posts.CreateAt / 1000)), 'YYYY-MM-DD') AS Name, COUNT(DISTINCT Posts.UserId) AS Value
+ FROM Posts`
+
+ if len(teamId) > 0 {
+ query += " INNER JOIN Channels ON Posts.ChannelId = Channels.Id AND Channels.TeamId = :TeamId AND"
+ } else {
+ query += " WHERE"
+ }
+
+ query += ` Posts.CreateAt >= :StartTime AND Posts.CreateAt <= :EndTime
+ GROUP BY DATE(TO_TIMESTAMP(Posts.CreateAt / 1000))
+ ORDER BY Name DESC
+ LIMIT 30`
+ }
+
+ end := utils.MillisFromTime(utils.EndOfDay(utils.Yesterday()))
+ start := utils.MillisFromTime(utils.StartOfDay(utils.Yesterday().AddDate(0, 0, -31)))
+
+ var rows model.AnalyticsRows
+ _, err := s.GetReplica().Select(
+ &rows,
+ query,
+ map[string]interface{}{"TeamId": teamId, "StartTime": start, "EndTime": end})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.AnalyticsUserCountsWithPostsByDay", "store.sql_post.analytics_user_counts_posts_by_day.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = rows
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) AnalyticsPostCountsByDay(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ DATE(FROM_UNIXTIME(Posts.CreateAt / 1000)) AS Name,
+ COUNT(Posts.Id) AS Value
+ FROM Posts`
+
+ if len(teamId) > 0 {
+ query += " INNER JOIN Channels ON Posts.ChannelId = Channels.Id AND Channels.TeamId = :TeamId AND"
+ } else {
+ query += " WHERE"
+ }
+
+ query += ` Posts.CreateAt <= :EndTime
+ AND Posts.CreateAt >= :StartTime
+ GROUP BY DATE(FROM_UNIXTIME(Posts.CreateAt / 1000))
+ ORDER BY Name DESC
+ LIMIT 30`
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query =
+ `SELECT
+ TO_CHAR(DATE(TO_TIMESTAMP(Posts.CreateAt / 1000)), 'YYYY-MM-DD') AS Name, Count(Posts.Id) AS Value
+ FROM Posts`
+
+ if len(teamId) > 0 {
+ query += " INNER JOIN Channels ON Posts.ChannelId = Channels.Id AND Channels.TeamId = :TeamId AND"
+ } else {
+ query += " WHERE"
+ }
+
+ query += ` Posts.CreateAt <= :EndTime
+ AND Posts.CreateAt >= :StartTime
+ GROUP BY DATE(TO_TIMESTAMP(Posts.CreateAt / 1000))
+ ORDER BY Name DESC
+ LIMIT 30`
+ }
+
+ end := utils.MillisFromTime(utils.EndOfDay(utils.Yesterday()))
+ start := utils.MillisFromTime(utils.StartOfDay(utils.Yesterday().AddDate(0, 0, -31)))
+
+ var rows model.AnalyticsRows
+ _, err := s.GetReplica().Select(
+ &rows,
+ query,
+ map[string]interface{}{"TeamId": teamId, "StartTime": start, "EndTime": end})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.AnalyticsPostCountsByDay", "store.sql_post.analytics_posts_count_by_day.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = rows
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) AnalyticsPostCount(teamId string, mustHaveFile bool, mustHaveHashtag bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ COUNT(Posts.Id) AS Value
+ FROM
+ Posts,
+ Channels
+ WHERE
+ Posts.ChannelId = Channels.Id`
+
+ if len(teamId) > 0 {
+ query += " AND Channels.TeamId = :TeamId"
+ }
+
+ if mustHaveFile {
+ query += " AND (Posts.FileIds != '[]' OR Posts.Filenames != '[]')"
+ }
+
+ if mustHaveHashtag {
+ query += " AND Posts.Hashtags != ''"
+ }
+
+ if v, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlPostStore.AnalyticsPostCount", "store.sql_post.analytics_posts_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPostsCreatedAt(channelId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := `SELECT * FROM Posts WHERE CreateAt = :CreateAt`
+
+ var posts []*model.Post
+ _, err := s.GetReplica().Select(&posts, query, map[string]interface{}{"CreateAt": time})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPostsCreatedAt", "store.sql_post.get_posts_created_att.app_error", nil, "channelId="+channelId+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPostsByIds(postIds []string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
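+		// The IN clause is built by direct string concatenation, so the post ids should already be
+		// validated ids rather than raw user input.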
+ inClause := `'` + strings.Join(postIds, `', '`) + `'`
+
+ query := `SELECT * FROM Posts WHERE Id in (` + inClause + `) and DeleteAt = 0 ORDER BY CreateAt DESC`
+
+ var posts []*model.Post
+ _, err := s.GetReplica().Select(&posts, query, map[string]interface{}{})
+
+ if err != nil {
+ l4g.Error(err)
+			result.Err = model.NewAppError("SqlPostStore.GetPostsByIds", "store.sql_post.get_posts_by_ids.app_error", nil, "", http.StatusInternalServerError)
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) GetPostsBatchForIndexing(startTime int64, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var posts []*model.PostForIndexing
+ _, err1 := s.GetSearchReplica().Select(&posts,
+ `(SELECT
+ Posts.*,
+ Channels.TeamId,
+ ParentPosts.CreateAt ParentCreateAt
+ FROM
+ Posts
+ LEFT JOIN
+ Channels
+ ON
+ Posts.ChannelId = Channels.Id
+ LEFT JOIN
+ Posts ParentPosts
+ ON
+ Posts.RootId = ParentPosts.Id
+ WHERE
+ Posts.CreateAt >= :StartTime
+ ORDER BY CreateAt ASC
+ LIMIT :NumPosts)`,
+ map[string]interface{}{"StartTime": startTime, "NumPosts": limit})
+
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlPostStore.GetPostContext", "store.sql_post.get_posts_batch_for_indexing.get.app_error", nil, err1.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPostStore) PermanentDeleteBatch(endTime int64, limit int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
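+		// Postgres has no DELETE ... LIMIT, so select the batch of ids in a subquery; MySQL can limit the DELETE directly.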
+ var query string
+		if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "DELETE from Posts WHERE Id = any (array (SELECT Id FROM Posts WHERE CreateAt < :EndTime LIMIT :Limit))"
+ } else {
+ query = "DELETE from Posts WHERE CreateAt < :EndTime LIMIT :Limit"
+ }
+
+ sqlResult, err := s.GetMaster().Exec(query, map[string]interface{}{"EndTime": endTime, "Limit": limit})
+ if err != nil {
+			result.Err = model.NewAppError("SqlPostStore.PermanentDeleteBatch", "store.sql_post.permanent_delete_batch.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ rowsAffected, err1 := sqlResult.RowsAffected()
+ if err1 != nil {
+				result.Err = model.NewAppError("SqlPostStore.PermanentDeleteBatch", "store.sql_post.permanent_delete_batch.app_error", nil, err1.Error(), http.StatusInternalServerError)
+ result.Data = int64(0)
+ } else {
+ result.Data = rowsAffected
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/post_store_test.go b/store/sqlstore/post_store_test.go
new file mode 100644
index 000000000..7be37584b
--- /dev/null
+++ b/store/sqlstore/post_store_test.go
@@ -0,0 +1,1703 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+func TestPostStoreSave(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+
+ if err := (<-ss.Post().Save(&o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Post().Save(&o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+}
+
+func TestPostStoreGet(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+
+ etag1 := (<-ss.Post().GetEtag(o1.ChannelId, false)).Data.(string)
+ if strings.Index(etag1, model.CurrentVersion+".") != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ etag2 := (<-ss.Post().GetEtag(o1.ChannelId, false)).Data.(string)
+ if strings.Index(etag2, fmt.Sprintf("%v.%v", model.CurrentVersion, o1.UpdateAt)) != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ if r1 := <-ss.Post().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.PostList).Posts[o1.Id].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned post")
+ }
+ }
+
+ if err := (<-ss.Post().Get("123")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ if err := (<-ss.Post().Get("")).Err; err == nil {
+ t.Fatal("should fail for blank post ids")
+ }
+}
+
+func TestPostStoreGetSingle(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ if r1 := <-ss.Post().GetSingle(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Post).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned post")
+ }
+ }
+
+ if err := (<-ss.Post().GetSingle("123")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestGetEtagCache(t *testing.T) {
+ ss := Setup()
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+
+ etag1 := (<-ss.Post().GetEtag(o1.ChannelId, true)).Data.(string)
+ if strings.Index(etag1, model.CurrentVersion+".") != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ // This one should come from the cache
+ etag2 := (<-ss.Post().GetEtag(o1.ChannelId, true)).Data.(string)
+ if strings.Index(etag2, model.CurrentVersion+".") != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ // We have not invalidated the cache so this should be the same as above
+ etag3 := (<-ss.Post().GetEtag(o1.ChannelId, true)).Data.(string)
+ if strings.Index(etag3, etag2) != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ ss.Post().InvalidateLastPostTimeCache(o1.ChannelId)
+
+ // Invalidated cache so we should get a good result
+ etag4 := (<-ss.Post().GetEtag(o1.ChannelId, true)).Data.(string)
+ if strings.Index(etag4, fmt.Sprintf("%v.%v", model.CurrentVersion, o1.UpdateAt)) != 0 {
+ t.Fatal("Invalid Etag")
+ }
+}
+
+func TestPostStoreUpdate(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "CCCCCCCCC"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "QQQQQQQQQQ"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ ro1 := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o1.Id]
+ ro2 := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o2.Id]
+ ro3 := (<-ss.Post().Get(o3.Id)).Data.(*model.PostList).Posts[o3.Id]
+
+ if ro1.Message != o1.Message {
+ t.Fatal("Failed to save/get")
+ }
+
+ o1a := &model.Post{}
+ *o1a = *ro1
+ o1a.Message = ro1.Message + "BBBBBBBBBB"
+ if result := <-ss.Post().Update(o1a, ro1); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro1a := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o1.Id]
+
+ if ro1a.Message != o1a.Message {
+ t.Fatal("Failed to update/get")
+ }
+
+ o2a := &model.Post{}
+ *o2a = *ro2
+ o2a.Message = ro2.Message + "DDDDDDD"
+ if result := <-ss.Post().Update(o2a, ro2); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro2a := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o2.Id]
+
+ if ro2a.Message != o2a.Message {
+ t.Fatal("Failed to update/get")
+ }
+
+ o3a := &model.Post{}
+ *o3a = *ro3
+ o3a.Message = ro3.Message + "WWWWWWW"
+ if result := <-ss.Post().Update(o3a, ro3); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro3a := (<-ss.Post().Get(o3.Id)).Data.(*model.PostList).Posts[o3.Id]
+
+ if ro3a.Message != o3a.Message || ro3a.Hashtags != o3a.Hashtags {
+ t.Fatal("Failed to update/get")
+ }
+
+ o4 := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ Message: model.NewId(),
+ Filenames: []string{"test"},
+ })).(*model.Post)
+
+ ro4 := (<-ss.Post().Get(o4.Id)).Data.(*model.PostList).Posts[o4.Id]
+
+ o4a := &model.Post{}
+ *o4a = *ro4
+ o4a.Filenames = []string{}
+ o4a.FileIds = []string{model.NewId()}
+ if result := <-ss.Post().Update(o4a, ro4); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if ro4a := store.Must(ss.Post().Get(o4.Id)).(*model.PostList).Posts[o4.Id]; len(ro4a.Filenames) != 0 {
+ t.Fatal("Failed to clear Filenames")
+ } else if len(ro4a.FileIds) != 1 {
+ t.Fatal("Failed to set FileIds")
+ }
+}
+
+func TestPostStoreDelete(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+
+ etag1 := (<-ss.Post().GetEtag(o1.ChannelId, false)).Data.(string)
+ if strings.Index(etag1, model.CurrentVersion+".") != 0 {
+ t.Fatal("Invalid Etag")
+ }
+
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ if r1 := <-ss.Post().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.PostList).Posts[o1.Id].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned post")
+ }
+ }
+
+ if r2 := <-ss.Post().Delete(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+
+ etag2 := (<-ss.Post().GetEtag(o1.ChannelId, false)).Data.(string)
+ if strings.Index(etag2, model.CurrentVersion+".") != 0 {
+ t.Fatal("Invalid Etag")
+ }
+}
+
+func TestPostStoreDelete1Level(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ if r2 := <-ss.Post().Delete(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o1.Id)); r3.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r4 := (<-ss.Post().Get(o2.Id)); r4.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+}
+
+func TestPostStoreDelete2Level(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.ParentId = o2.Id
+ o3.RootId = o1.Id
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ o4 := &model.Post{}
+ o4.ChannelId = model.NewId()
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+
+ if r2 := <-ss.Post().Delete(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o1.Id)); r3.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r4 := (<-ss.Post().Get(o2.Id)); r4.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r5 := (<-ss.Post().Get(o3.Id)); r5.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r6 := <-ss.Post().Get(o4.Id); r6.Err != nil {
+ t.Fatal(r6.Err)
+ }
+}
+
+func TestPostStorePermDelete1Level(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = model.NewId()
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ if r2 := <-ss.Post().PermanentDeleteByUser(o2.UserId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o1.Id)); r3.Err != nil {
+ t.Fatal("Deleted id shouldn't have failed")
+ }
+
+ if r4 := (<-ss.Post().Get(o2.Id)); r4.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r2 := <-ss.Post().PermanentDeleteByChannel(o3.ChannelId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o3.Id)); r3.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+}
+
+func TestPostStorePermDelete1Level2(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = model.NewId()
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ if r2 := <-ss.Post().PermanentDeleteByUser(o1.UserId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Post().Get(o1.Id)); r3.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r4 := (<-ss.Post().Get(o2.Id)); r4.Err == nil {
+ t.Fatal("Deleted id should have failed")
+ }
+
+ if r5 := (<-ss.Post().Get(o3.Id)); r5.Err != nil {
+ t.Fatal("Deleted id shouldn't have failed")
+ }
+}
+
+func TestPostStoreGetWithChildren(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.ParentId = o2.Id
+ o3.RootId = o1.Id
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ if r1 := <-ss.Post().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ pl := r1.Data.(*model.PostList)
+ if len(pl.Posts) != 3 {
+ t.Fatal("invalid returned post")
+ }
+ }
+
+ store.Must(ss.Post().Delete(o3.Id, model.GetMillis()))
+
+ if r2 := <-ss.Post().Get(o1.Id); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ pl := r2.Data.(*model.PostList)
+ if len(pl.Posts) != 2 {
+ t.Fatal("invalid returned post")
+ }
+ }
+
+ store.Must(ss.Post().Delete(o2.Id, model.GetMillis()))
+
+ if r3 := <-ss.Post().Get(o1.Id); r3.Err != nil {
+ t.Fatal(r3.Err)
+ } else {
+ pl := r3.Data.(*model.PostList)
+ if len(pl.Posts) != 1 {
+ t.Fatal("invalid returned post")
+ }
+ }
+}
+
+func TestPostStoreGetPostsWithDetails(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = o1.ChannelId
+ o2a.UserId = model.NewId()
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a.ParentId = o1.Id
+ o2a.RootId = o1.Id
+ o2a = (<-ss.Post().Save(o2a)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.ParentId = o1.Id
+ o3.RootId = o1.Id
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o4 := &model.Post{}
+ o4.ChannelId = o1.ChannelId
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o5 := &model.Post{}
+ o5.ChannelId = o1.ChannelId
+ o5.UserId = model.NewId()
+ o5.Message = "zz" + model.NewId() + "b"
+ o5.ParentId = o4.Id
+ o5.RootId = o4.Id
+ o5 = (<-ss.Post().Save(o5)).Data.(*model.Post)
+
+ r1 := (<-ss.Post().GetPosts(o1.ChannelId, 0, 4, false)).Data.(*model.PostList)
+
+ if r1.Order[0] != o5.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[1] != o4.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[2] != o3.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[3] != o2a.Id {
+ t.Fatal("invalid order")
+ }
+
+ if len(r1.Posts) != 6 { //the last 4, + o1 (o2a and o3's parent) + o2 (in same thread as o2a and o3)
+ t.Fatal("wrong size")
+ }
+
+ if r1.Posts[o1.Id].Message != o1.Message {
+ t.Fatal("Missing parent")
+ }
+
+ r2 := (<-ss.Post().GetPosts(o1.ChannelId, 0, 4, true)).Data.(*model.PostList)
+
+ if r2.Order[0] != o5.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[1] != o4.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[2] != o3.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[3] != o2a.Id {
+ t.Fatal("invalid order")
+ }
+
+ if len(r2.Posts) != 6 { //the last 4, + o1 (o2a and o3's parent) + o2 (in same thread as o2a and o3)
+ t.Fatal("wrong size")
+ }
+
+ if r2.Posts[o1.Id].Message != o1.Message {
+ t.Fatal("Missing parent")
+ }
+}
+
+func TestPostStoreGetPostsBeforeAfter(t *testing.T) {
+ ss := Setup()
+ o0 := &model.Post{}
+ o0.ChannelId = model.NewId()
+ o0.UserId = model.NewId()
+ o0.Message = "zz" + model.NewId() + "b"
+ o0 = (<-ss.Post().Save(o0)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = o1.ChannelId
+ o2a.UserId = model.NewId()
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a.ParentId = o1.Id
+ o2a.RootId = o1.Id
+ o2a = (<-ss.Post().Save(o2a)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.ParentId = o1.Id
+ o3.RootId = o1.Id
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o4 := &model.Post{}
+ o4.ChannelId = o1.ChannelId
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o5 := &model.Post{}
+ o5.ChannelId = o1.ChannelId
+ o5.UserId = model.NewId()
+ o5.Message = "zz" + model.NewId() + "b"
+ o5.ParentId = o4.Id
+ o5.RootId = o4.Id
+ o5 = (<-ss.Post().Save(o5)).Data.(*model.Post)
+
+ r1 := (<-ss.Post().GetPostsBefore(o1.ChannelId, o1.Id, 4, 0)).Data.(*model.PostList)
+
+ if len(r1.Posts) != 0 {
+ t.Fatal("Wrong size")
+ }
+
+ r2 := (<-ss.Post().GetPostsAfter(o1.ChannelId, o1.Id, 4, 0)).Data.(*model.PostList)
+
+ if r2.Order[0] != o4.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[1] != o3.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[2] != o2a.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r2.Order[3] != o2.Id {
+ t.Fatal("invalid order")
+ }
+
+ if len(r2.Posts) != 5 {
+ t.Fatal("wrong size")
+ }
+
+ r3 := (<-ss.Post().GetPostsBefore(o3.ChannelId, o3.Id, 2, 0)).Data.(*model.PostList)
+
+ if r3.Order[0] != o2a.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r3.Order[1] != o2.Id {
+ t.Fatal("invalid order")
+ }
+
+ if len(r3.Posts) != 3 {
+ t.Fatal("wrong size")
+ }
+
+ if r3.Posts[o1.Id].Message != o1.Message {
+ t.Fatal("Missing parent")
+ }
+}
+
+func TestPostStoreGetPostsSince(t *testing.T) {
+ ss := Setup()
+ o0 := &model.Post{}
+ o0.ChannelId = model.NewId()
+ o0.UserId = model.NewId()
+ o0.Message = "zz" + model.NewId() + "b"
+ o0 = (<-ss.Post().Save(o0)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = o1.ChannelId
+ o2a.UserId = model.NewId()
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a.ParentId = o1.Id
+ o2a.RootId = o1.Id
+ o2a = (<-ss.Post().Save(o2a)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.ParentId = o1.Id
+ o3.RootId = o1.Id
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o4 := &model.Post{}
+ o4.ChannelId = o1.ChannelId
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o5 := &model.Post{}
+ o5.ChannelId = o1.ChannelId
+ o5.UserId = model.NewId()
+ o5.Message = "zz" + model.NewId() + "b"
+ o5.ParentId = o4.Id
+ o5.RootId = o4.Id
+ o5 = (<-ss.Post().Save(o5)).Data.(*model.Post)
+
+ r1 := (<-ss.Post().GetPostsSince(o1.ChannelId, o1.CreateAt, false)).Data.(*model.PostList)
+
+ if r1.Order[0] != o5.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[1] != o4.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[2] != o3.Id {
+ t.Fatal("invalid order")
+ }
+
+ if r1.Order[3] != o2a.Id {
+ t.Fatal("invalid order")
+ }
+
+ if len(r1.Posts) != 6 {
+ t.Fatal("wrong size")
+ }
+
+ if r1.Posts[o1.Id].Message != o1.Message {
+ t.Fatal("Missing parent")
+ }
+
+ r2 := (<-ss.Post().GetPostsSince(o1.ChannelId, o5.UpdateAt, true)).Data.(*model.PostList)
+
+ if len(r2.Order) != 0 {
+ t.Fatal("wrong size ", len(r2.Posts))
+ }
+}
+
+func TestPostStoreSearch(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+ userId := model.NewId()
+
+ c1 := &model.Channel{}
+ c1.TeamId = teamId
+ c1.DisplayName = "Channel1"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = (<-ss.Channel().Save(c1)).Data.(*model.Channel)
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = c1.Id
+ m1.UserId = userId
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+ store.Must(ss.Channel().SaveMember(&m1))
+
+ c2 := &model.Channel{}
+ c2.TeamId = teamId
+ c2.DisplayName = "Channel1"
+ c2.Name = "zz" + model.NewId() + "b"
+ c2.Type = model.CHANNEL_OPEN
+ c2 = (<-ss.Channel().Save(c2)).Data.(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.Message = "corey mattermost new york"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o1a := &model.Post{}
+ o1a.ChannelId = c1.Id
+ o1a.UserId = model.NewId()
+ o1a.Message = "corey mattermost new york"
+ o1a.Type = model.POST_JOIN_CHANNEL
+ o1a = (<-ss.Post().Save(o1a)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c1.Id
+ o2.UserId = model.NewId()
+ o2.Message = "New Jersey is where John is from"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = c2.Id
+ o3.UserId = model.NewId()
+ o3.Message = "New Jersey is where John is from corey new york"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ o4 := &model.Post{}
+ o4.ChannelId = c1.Id
+ o4.UserId = model.NewId()
+ o4.Hashtags = "#hashtag"
+ o4.Message = "(message)blargh"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+
+ o5 := &model.Post{}
+ o5.ChannelId = c1.Id
+ o5.UserId = model.NewId()
+ o5.Hashtags = "#secret #howdy"
+ o5 = (<-ss.Post().Save(o5)).Data.(*model.Post)
+
+ r1 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "corey", IsHashtag: false})).Data.(*model.PostList)
+ if len(r1.Order) != 1 || r1.Order[0] != o1.Id {
+ t.Fatal("returned wrong search result")
+ }
+
+ r3 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "new", IsHashtag: false})).Data.(*model.PostList)
+ if len(r3.Order) != 2 || (r3.Order[0] != o1.Id && r3.Order[1] != o1.Id) {
+ t.Fatal("returned wrong search result")
+ }
+
+ r4 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "john", IsHashtag: false})).Data.(*model.PostList)
+ if len(r4.Order) != 1 || r4.Order[0] != o2.Id {
+ t.Fatal("returned wrong search result")
+ }
+
+ r5 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "matter*", IsHashtag: false})).Data.(*model.PostList)
+ if len(r5.Order) != 1 || r5.Order[0] != o1.Id {
+ t.Fatal("returned wrong search result")
+ }
+
+ r6 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "#hashtag", IsHashtag: true})).Data.(*model.PostList)
+ if len(r6.Order) != 1 || r6.Order[0] != o4.Id {
+ t.Fatal("returned wrong search result")
+ }
+
+ r7 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "#secret", IsHashtag: true})).Data.(*model.PostList)
+ if len(r7.Order) != 1 || r7.Order[0] != o5.Id {
+ t.Fatal("returned wrong search result")
+ }
+
+ r8 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "@thisshouldmatchnothing", IsHashtag: true})).Data.(*model.PostList)
+ if len(r8.Order) != 0 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r9 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "mattermost jersey", IsHashtag: false})).Data.(*model.PostList)
+ if len(r9.Order) != 0 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r9a := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "corey new york", IsHashtag: false})).Data.(*model.PostList)
+ if len(r9a.Order) != 1 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r10 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "matter* jer*", IsHashtag: false})).Data.(*model.PostList)
+ if len(r10.Order) != 0 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r11 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "message blargh", IsHashtag: false})).Data.(*model.PostList)
+ if len(r11.Order) != 1 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r12 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "blargh>", IsHashtag: false})).Data.(*model.PostList)
+ if len(r12.Order) != 1 {
+ t.Fatal("returned wrong search result")
+ }
+
+ r13 := (<-ss.Post().Search(teamId, userId, &model.SearchParams{Terms: "Jersey corey", IsHashtag: false, OrTerms: true})).Data.(*model.PostList)
+ if len(r13.Order) != 2 {
+ t.Fatal("returned wrong search result")
+ }
+}
+
+func TestUserCountsWithPostsByDay(t *testing.T) {
+ ss := Setup()
+
+ t1 := &model.Team{}
+ t1.DisplayName = "DisplayName"
+ t1.Name = "zz" + model.NewId() + "b"
+ t1.Email = model.NewId() + "@nowhere.com"
+ t1.Type = model.TEAM_OPEN
+ t1 = store.Must(ss.Team().Save(t1)).(*model.Team)
+
+ c1 := &model.Channel{}
+ c1.TeamId = t1.Id
+ c1.DisplayName = "Channel2"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = store.Must(ss.Channel().Save(c1)).(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.CreateAt = utils.MillisFromTime(utils.Yesterday())
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = store.Must(ss.Post().Save(o1)).(*model.Post)
+
+ o1a := &model.Post{}
+ o1a.ChannelId = c1.Id
+ o1a.UserId = model.NewId()
+ o1a.CreateAt = o1.CreateAt
+ o1a.Message = "zz" + model.NewId() + "b"
+ o1a = store.Must(ss.Post().Save(o1a)).(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c1.Id
+ o2.UserId = model.NewId()
+ o2.CreateAt = o1.CreateAt - (1000 * 60 * 60 * 24)
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = store.Must(ss.Post().Save(o2)).(*model.Post)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = c1.Id
+ o2a.UserId = o2.UserId
+ o2a.CreateAt = o1.CreateAt - (1000 * 60 * 60 * 24)
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a = store.Must(ss.Post().Save(o2a)).(*model.Post)
+
+ if r1 := <-ss.Post().AnalyticsUserCountsWithPostsByDay(t1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ row1 := r1.Data.(model.AnalyticsRows)[0]
+ if row1.Value != 2 {
+ t.Fatal("wrong value")
+ }
+
+ row2 := r1.Data.(model.AnalyticsRows)[1]
+ if row2.Value != 1 {
+ t.Fatal("wrong value")
+ }
+ }
+}
+
+func TestPostCountsByDay(t *testing.T) {
+ ss := Setup()
+
+ t1 := &model.Team{}
+ t1.DisplayName = "DisplayName"
+ t1.Name = "zz" + model.NewId() + "b"
+ t1.Email = model.NewId() + "@nowhere.com"
+ t1.Type = model.TEAM_OPEN
+ t1 = store.Must(ss.Team().Save(t1)).(*model.Team)
+
+ c1 := &model.Channel{}
+ c1.TeamId = t1.Id
+ c1.DisplayName = "Channel2"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = store.Must(ss.Channel().Save(c1)).(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.CreateAt = utils.MillisFromTime(utils.Yesterday())
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = store.Must(ss.Post().Save(o1)).(*model.Post)
+
+ o1a := &model.Post{}
+ o1a.ChannelId = c1.Id
+ o1a.UserId = model.NewId()
+ o1a.CreateAt = o1.CreateAt
+ o1a.Message = "zz" + model.NewId() + "b"
+ o1a = store.Must(ss.Post().Save(o1a)).(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c1.Id
+ o2.UserId = model.NewId()
+ o2.CreateAt = o1.CreateAt - (1000 * 60 * 60 * 24 * 2)
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = store.Must(ss.Post().Save(o2)).(*model.Post)
+
+ o2a := &model.Post{}
+ o2a.ChannelId = c1.Id
+ o2a.UserId = o2.UserId
+ o2a.CreateAt = o1.CreateAt - (1000 * 60 * 60 * 24 * 2)
+ o2a.Message = "zz" + model.NewId() + "b"
+ o2a = store.Must(ss.Post().Save(o2a)).(*model.Post)
+
+ time.Sleep(1 * time.Second)
+
+ if r1 := <-ss.Post().AnalyticsPostCountsByDay(t1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ row1 := r1.Data.(model.AnalyticsRows)[0]
+ if row1.Value != 2 {
+ t.Fatal(row1)
+ }
+
+ row2 := r1.Data.(model.AnalyticsRows)[1]
+ if row2.Value != 2 {
+ t.Fatal("wrong value")
+ }
+ }
+
+ if r1 := <-ss.Post().AnalyticsPostCount(t1.Id, false, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) != 4 {
+ t.Fatal("wrong value")
+ }
+ }
+}
+
+func TestPostStoreGetFlaggedPostsForTeam(t *testing.T) {
+ ss := Setup()
+
+ c1 := &model.Channel{}
+ c1.TeamId = model.NewId()
+ c1.DisplayName = "Channel1"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = store.Must(ss.Channel().Save(c1)).(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.DeleteAt = 1
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o4 := &model.Post{}
+ o4.ChannelId = model.NewId()
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ c2 := &model.Channel{}
+ c2.DisplayName = "DMChannel1"
+ c2.Name = "zz" + model.NewId() + "b"
+ c2.Type = model.CHANNEL_DIRECT
+
+ m1 := &model.ChannelMember{}
+ m1.ChannelId = c2.Id
+ m1.UserId = o1.UserId
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := &model.ChannelMember{}
+ m2.ChannelId = c2.Id
+ m2.UserId = model.NewId()
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ c2 = store.Must(ss.Channel().SaveDirectChannel(c2, m1, m2)).(*model.Channel)
+
+ o5 := &model.Post{}
+ o5.ChannelId = c2.Id
+ o5.UserId = m2.UserId
+ o5.Message = "zz" + model.NewId() + "b"
+ o5 = (<-ss.Post().Save(o5)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ r1 := (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 2)).Data.(*model.PostList)
+
+ if len(r1.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ preferences := model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o1.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r2 := (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 2)).Data.(*model.PostList)
+
+ if len(r2.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o2.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r3 := (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 1)).Data.(*model.PostList)
+
+ if len(r3.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r3 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 1, 1)).Data.(*model.PostList)
+
+ if len(r3.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r3 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 1000, 10)).Data.(*model.PostList)
+
+ if len(r3.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ r4 := (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o3.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r4 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o4.Id,
+ Value: "true",
+ },
+ }
+ store.Must(ss.Preference().Save(&preferences))
+
+ r4 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+
+ r4 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, model.NewId(), 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 0 {
+ t.Fatal("should have 0 posts")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o5.Id,
+ Value: "true",
+ },
+ }
+ store.Must(ss.Preference().Save(&preferences))
+
+ r4 = (<-ss.Post().GetFlaggedPostsForTeam(o1.UserId, c1.TeamId, 0, 10)).Data.(*model.PostList)
+
+ if len(r4.Order) != 3 {
+ t.Fatal("should have 3 posts")
+ }
+}
+
+func TestPostStoreGetFlaggedPosts(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.DeleteAt = 1
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ r1 := (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 2)).Data.(*model.PostList)
+
+ if len(r1.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ preferences := model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o1.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r2 := (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 2)).Data.(*model.PostList)
+
+ if len(r2.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o2.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r3 := (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 1)).Data.(*model.PostList)
+
+ if len(r3.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r3 = (<-ss.Post().GetFlaggedPosts(o1.UserId, 1, 1)).Data.(*model.PostList)
+
+ if len(r3.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r3 = (<-ss.Post().GetFlaggedPosts(o1.UserId, 1000, 10)).Data.(*model.PostList)
+
+ if len(r3.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ r4 := (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+
+ preferences = model.Preferences{
+ {
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o3.Id,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ r4 = (<-ss.Post().GetFlaggedPosts(o1.UserId, 0, 2)).Data.(*model.PostList)
+
+ if len(r4.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+}
+
+func TestPostStoreGetFlaggedPostsForChannel(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ // deleted post
+ o3 := &model.Post{}
+ o3.ChannelId = model.NewId()
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.DeleteAt = 1
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o4 := &model.Post{}
+ o4.ChannelId = model.NewId()
+ o4.UserId = model.NewId()
+ o4.Message = "zz" + model.NewId() + "b"
+ o4 = (<-ss.Post().Save(o4)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ r := (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 0, 10)).Data.(*model.PostList)
+
+ if len(r.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ preference := model.Preference{
+ UserId: o1.UserId,
+ Category: model.PREFERENCE_CATEGORY_FLAGGED_POST,
+ Name: o1.Id,
+ Value: "true",
+ }
+
+ store.Must(ss.Preference().Save(&model.Preferences{preference}))
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 0, 10)).Data.(*model.PostList)
+
+ if len(r.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ preference.Name = o2.Id
+ store.Must(ss.Preference().Save(&model.Preferences{preference}))
+
+ preference.Name = o3.Id
+ store.Must(ss.Preference().Save(&model.Preferences{preference}))
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 0, 1)).Data.(*model.PostList)
+
+ if len(r.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 1, 1)).Data.(*model.PostList)
+
+ if len(r.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 1000, 10)).Data.(*model.PostList)
+
+ if len(r.Order) != 0 {
+ t.Fatal("should be empty")
+ }
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o1.ChannelId, 0, 10)).Data.(*model.PostList)
+
+ if len(r.Order) != 2 {
+ t.Fatal("should have 2 posts")
+ }
+
+ preference.Name = o4.Id
+ store.Must(ss.Preference().Save(&model.Preferences{preference}))
+
+ r = (<-ss.Post().GetFlaggedPostsForChannel(o1.UserId, o4.ChannelId, 0, 10)).Data.(*model.PostList)
+
+ if len(r.Order) != 1 {
+ t.Fatal("should have 1 post")
+ }
+}
+
+func TestPostStoreGetPostsCreatedAt(t *testing.T) {
+ ss := Setup()
+
+ createTime := model.GetMillis()
+
+ o0 := &model.Post{}
+ o0.ChannelId = model.NewId()
+ o0.UserId = model.NewId()
+ o0.Message = "zz" + model.NewId() + "b"
+ o0.CreateAt = createTime
+ o0 = (<-ss.Post().Save(o0)).Data.(*model.Post)
+
+ o1 := &model.Post{}
+ o1.ChannelId = o0.ChannelId
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "b"
+ o1.CreateAt = createTime
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "b"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ o3 := &model.Post{}
+ o3.ChannelId = model.NewId()
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "b"
+ o3.CreateAt = createTime
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+ time.Sleep(2 * time.Millisecond)
+
+ r1 := (<-ss.Post().GetPostsCreatedAt(o1.ChannelId, createTime)).Data.([]*model.Post)
+
+ if len(r1) != 2 {
+ t.Fatalf("Got the wrong number of posts.")
+ }
+}
+
+func TestPostStoreOverwrite(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "CCCCCCCCC"
+ o2.ParentId = o1.Id
+ o2.RootId = o1.Id
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "QQQQQQQQQQ"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ ro1 := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o1.Id]
+ ro2 := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o2.Id]
+ ro3 := (<-ss.Post().Get(o3.Id)).Data.(*model.PostList).Posts[o3.Id]
+
+ if ro1.Message != o1.Message {
+ t.Fatal("Failed to save/get")
+ }
+
+ o1a := &model.Post{}
+ *o1a = *ro1
+ o1a.Message = ro1.Message + "BBBBBBBBBB"
+ if result := <-ss.Post().Overwrite(o1a); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro1a := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o1.Id]
+
+ if ro1a.Message != o1a.Message {
+ t.Fatal("Failed to overwrite/get")
+ }
+
+ o2a := &model.Post{}
+ *o2a = *ro2
+ o2a.Message = ro2.Message + "DDDDDDD"
+ if result := <-ss.Post().Overwrite(o2a); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro2a := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o2.Id]
+
+ if ro2a.Message != o2a.Message {
+ t.Fatal("Failed to overwrite/get")
+ }
+
+ o3a := &model.Post{}
+ *o3a = *ro3
+ o3a.Message = ro3.Message + "WWWWWWW"
+ if result := <-ss.Post().Overwrite(o3a); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ ro3a := (<-ss.Post().Get(o3.Id)).Data.(*model.PostList).Posts[o3.Id]
+
+ if ro3a.Message != o3a.Message || ro3a.Hashtags != o3a.Hashtags {
+ t.Fatal("Failed to overwrite/get")
+ }
+
+ o4 := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ Message: model.NewId(),
+ Filenames: []string{"test"},
+ })).(*model.Post)
+
+ ro4 := (<-ss.Post().Get(o4.Id)).Data.(*model.PostList).Posts[o4.Id]
+
+ o4a := &model.Post{}
+ *o4a = *ro4
+ o4a.Filenames = []string{}
+ o4a.FileIds = []string{model.NewId()}
+ if result := <-ss.Post().Overwrite(o4a); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if ro4a := store.Must(ss.Post().Get(o4.Id)).(*model.PostList).Posts[o4.Id]; len(ro4a.Filenames) != 0 {
+ t.Fatal("Failed to clear Filenames")
+ } else if len(ro4a.FileIds) != 1 {
+ t.Fatal("Failed to set FileIds")
+ }
+}
+
+func TestPostStoreGetPostsByIds(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = o1.ChannelId
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "CCCCCCCCC"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = o1.ChannelId
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "QQQQQQQQQQ"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ ro1 := (<-ss.Post().Get(o1.Id)).Data.(*model.PostList).Posts[o1.Id]
+ ro2 := (<-ss.Post().Get(o2.Id)).Data.(*model.PostList).Posts[o2.Id]
+ ro3 := (<-ss.Post().Get(o3.Id)).Data.(*model.PostList).Posts[o3.Id]
+
+ postIds := []string{
+ ro1.Id,
+ ro2.Id,
+ ro3.Id,
+ }
+
+ if ro4 := store.Must(ss.Post().GetPostsByIds(postIds)).([]*model.Post); len(ro4) != 3 {
+ t.Fatalf("Expected 3 posts in results. Got %v", len(ro4))
+ }
+
+ store.Must(ss.Post().Delete(ro1.Id, model.GetMillis()))
+
+ if ro5 := store.Must(ss.Post().GetPostsByIds(postIds)).([]*model.Post); len(ro5) != 2 {
+ t.Fatalf("Expected 2 posts in results. Got %v", len(ro5))
+ }
+}
+
+func TestPostStoreGetPostsBatchForIndexing(t *testing.T) {
+ ss := Setup()
+
+ c1 := &model.Channel{}
+ c1.TeamId = model.NewId()
+ c1.DisplayName = "Channel1"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = (<-ss.Channel().Save(c1)).Data.(*model.Channel)
+
+ c2 := &model.Channel{}
+ c2.TeamId = model.NewId()
+ c2.DisplayName = "Channel2"
+ c2.Name = "zz" + model.NewId() + "b"
+ c2.Type = model.CHANNEL_OPEN
+ c2 = (<-ss.Channel().Save(c2)).Data.(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c2.Id
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "CCCCCCCCC"
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = c1.Id
+ o3.UserId = model.NewId()
+ o3.ParentId = o1.Id
+ o3.RootId = o1.Id
+ o3.Message = "zz" + model.NewId() + "QQQQQQQQQQ"
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ if r := store.Must(ss.Post().GetPostsBatchForIndexing(o1.CreateAt, 100)).([]*model.PostForIndexing); len(r) != 3 {
+ t.Fatalf("Expected 3 posts in results. Got %v", len(r))
+ } else {
+ for _, p := range r {
+ if p.Id == o1.Id {
+ if p.TeamId != c1.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if p.ParentCreateAt != nil {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else if p.Id == o2.Id {
+ if p.TeamId != c2.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if p.ParentCreateAt != nil {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else if p.Id == o3.Id {
+ if p.TeamId != c1.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if *p.ParentCreateAt != o1.CreateAt {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else {
+ t.Fatalf("unexpected post returned")
+ }
+ }
+ }
+}
+
+func TestPostStorePermanentDeleteBatch(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1.CreateAt = 1000
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = model.NewId()
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o2.CreateAt = 1000
+ o2 = (<-ss.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = model.NewId()
+ o3.UserId = model.NewId()
+ o3.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o3.CreateAt = 100000
+ o3 = (<-ss.Post().Save(o3)).Data.(*model.Post)
+
+ store.Must(ss.Post().PermanentDeleteBatch(2000, 1000))
+
+ if p := <-ss.Post().Get(o1.Id); p.Err == nil {
+ t.Fatalf("Should have not found post 1 after purge")
+ }
+
+ if p := <-ss.Post().Get(o2.Id); p.Err == nil {
+ t.Fatalf("Should have not found post 2 after purge")
+ }
+
+ if p := <-ss.Post().Get(o3.Id); p.Err != nil {
+ t.Fatalf("Should have found post 3 after purge")
+ }
+}
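
The tests above consume StoreChannel results in two interchangeable styles: explicit StoreResult inspection where the Err path is asserted, and store.Must for happy-path fixture setup, which panics on a non-nil Err and hands back Data for a type assertion. A condensed sketch of both, assuming the same Setup() helper these tests use; the test name is illustrative only:

package sqlstore

import (
	"testing"

	"github.com/mattermost/mattermost-server/model"
	"github.com/mattermost/mattermost-server/store"
)

func TestStoreChannelConsumptionSketch(t *testing.T) {
	ss := Setup()

	p := &model.Post{ChannelId: model.NewId(), UserId: model.NewId(), Message: "zz" + model.NewId() + "b"}

	// Explicit style: check Err, then type-assert Data.
	if result := <-ss.Post().Save(p); result.Err != nil {
		t.Fatal(result.Err)
	} else {
		p = result.Data.(*model.Post)
	}

	// store.Must style: panics if Err is set, otherwise returns Data.
	saved := store.Must(ss.Post().GetSingle(p.Id)).(*model.Post)
	if saved.Id != p.Id {
		t.Fatal("expected the post that was just saved")
	}
}
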
diff --git a/store/sqlstore/preference_store.go b/store/sqlstore/preference_store.go
new file mode 100644
index 000000000..2aab91386
--- /dev/null
+++ b/store/sqlstore/preference_store.go
@@ -0,0 +1,427 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/mattermost/gorp"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlPreferenceStore struct {
+ SqlStore
+}
+
+const (
+ FEATURE_TOGGLE_PREFIX = "feature_enabled_"
+)
+
+func NewSqlPreferenceStore(sqlStore SqlStore) store.PreferenceStore {
+ s := &SqlPreferenceStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Preference{}, "Preferences").SetKeys(false, "UserId", "Category", "Name")
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("Category").SetMaxSize(32)
+ table.ColMap("Name").SetMaxSize(32)
+ table.ColMap("Value").SetMaxSize(2000)
+ }
+
+ return s
+}
+
+func (s SqlPreferenceStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_preferences_user_id", "Preferences", "UserId")
+ s.CreateIndexIfNotExists("idx_preferences_category", "Preferences", "Category")
+ s.CreateIndexIfNotExists("idx_preferences_name", "Preferences", "Name")
+}
+
+func (s SqlPreferenceStore) DeleteUnusedFeatures() {
+ l4g.Debug(utils.T("store.sql_preference.delete_unused_features.debug"))
+
+ sql := `DELETE
+ FROM Preferences
+ WHERE
+ Category = :Category
+ AND Value = :Value
+ AND Name LIKE '` + FEATURE_TOGGLE_PREFIX + `%'`
+
+ queryParams := map[string]string{
+ "Category": model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS,
+ "Value": "false",
+ }
+ s.GetMaster().Exec(sql, queryParams)
+}
+
+func (s SqlPreferenceStore) Save(preferences *model.Preferences) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ // wrap in a transaction so that if one fails, everything fails
+ transaction, err := s.GetMaster().Begin()
+ if err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.Save", "store.sql_preference.save.open_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, preference := range *preferences {
+ if upsertResult := s.save(transaction, &preference); upsertResult.Err != nil {
+ result = upsertResult
+ break
+ }
+ }
+
+ if result.Err == nil {
+ if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlPreferenceStore.Save", "store.sql_preference.save.commit_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = len(*preferences)
+ }
+ } else {
+ if err := transaction.Rollback(); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.Save", "store.sql_preference.save.rollback_transaction.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) save(transaction *gorp.Transaction, preference *model.Preference) store.StoreResult {
+ result := store.StoreResult{}
+
+ preference.PreUpdate()
+
+ if result.Err = preference.IsValid(); result.Err != nil {
+ return result
+ }
+
+ params := map[string]interface{}{
+ "UserId": preference.UserId,
+ "Category": preference.Category,
+ "Name": preference.Name,
+ "Value": preference.Value,
+ }
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ if _, err := transaction.Exec(
+ `INSERT INTO
+ Preferences
+ (UserId, Category, Name, Value)
+ VALUES
+ (:UserId, :Category, :Name, :Value)
+ ON DUPLICATE KEY UPDATE
+ Value = :Value`, params); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.save", "store.sql_preference.save.updating.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ // Postgres has no way to upsert values until version 9.5, and trying to insert and then update on failure causes the transaction to abort
+ count, err := transaction.SelectInt(
+ `SELECT
+ count(0)
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category
+ AND Name = :Name`, params)
+ if err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.save", "store.sql_preference.save.updating.app_error", nil, err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ if count == 1 {
+ result = s.update(transaction, preference)
+ } else {
+ result = s.insert(transaction, preference)
+ }
+ } else {
+ result.Err = model.NewAppError("SqlPreferenceStore.save", "store.sql_preference.save.missing_driver.app_error", nil, "Failed to update preference because of missing driver", http.StatusNotImplemented)
+ }
+
+ return result
+}
+
+func (s SqlPreferenceStore) insert(transaction *gorp.Transaction, preference *model.Preference) store.StoreResult {
+ result := store.StoreResult{}
+
+ if err := transaction.Insert(preference); err != nil {
+ if IsUniqueConstraintError(err, []string{"UserId", "preferences_pkey"}) {
+ result.Err = model.NewAppError("SqlPreferenceStore.insert", "store.sql_preference.insert.exists.app_error", nil,
+ "user_id="+preference.UserId+", category="+preference.Category+", name="+preference.Name+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlPreferenceStore.insert", "store.sql_preference.insert.save.app_error", nil,
+ "user_id="+preference.UserId+", category="+preference.Category+", name="+preference.Name+", "+err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ return result
+}
+
+func (s SqlPreferenceStore) update(transaction *gorp.Transaction, preference *model.Preference) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Update(preference); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.update", "store.sql_preference.update.app_error", nil,
+ "user_id="+preference.UserId+", category="+preference.Category+", name="+preference.Name+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ return result
+}
+
+func (s SqlPreferenceStore) Get(userId string, category string, name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var preference model.Preference
+
+ if err := s.GetReplica().SelectOne(&preference,
+ `SELECT
+ *
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category
+ AND Name = :Name`, map[string]interface{}{"UserId": userId, "Category": category, "Name": name}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.Get", "store.sql_preference.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = preference
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) GetCategory(userId string, category string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var preferences model.Preferences
+
+ if _, err := s.GetReplica().Select(&preferences,
+ `SELECT
+ *
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category`, map[string]interface{}{"UserId": userId, "Category": category}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.GetCategory", "store.sql_preference.get_category.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = preferences
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) GetAll(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var preferences model.Preferences
+
+ if _, err := s.GetReplica().Select(&preferences,
+ `SELECT
+ *
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId`, map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.GetAll", "store.sql_preference.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = preferences
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) PermanentDeleteByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `DELETE FROM Preferences WHERE UserId = :UserId`, map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.PermanentDeleteByUser", "store.sql_preference.permanent_delete_by_user.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) IsFeatureEnabled(feature, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ if value, err := s.GetReplica().SelectStr(`SELECT
+ value
+ FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category
+ AND Name = :Name`, map[string]interface{}{"UserId": userId, "Category": model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS, "Name": FEATURE_TOGGLE_PREFIX + feature}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.IsFeatureEnabled", "store.sql_preference.is_feature_enabled.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = value == "true"
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) Delete(userId, category, name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `DELETE FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category
+ AND Name = :Name`, map[string]interface{}{"UserId": userId, "Category": category, "Name": name}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.Delete", "store.sql_preference.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) DeleteCategory(userId string, category string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `DELETE FROM
+ Preferences
+ WHERE
+ UserId = :UserId
+ AND Category = :Category`, map[string]interface{}{"UserId": userId, "Category": category}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.DeleteCategory", "store.sql_preference.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) DeleteCategoryAndName(category string, name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec(
+ `DELETE FROM
+ Preferences
+ WHERE
+ Name = :Name
+ AND Category = :Category`, map[string]interface{}{"Name": name, "Category": category}); err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.DeleteCategoryAndName", "store.sql_preference.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlPreferenceStore) CleanupFlagsBatch(limit int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `DELETE FROM
+ Preferences
+ WHERE
+ Category = :Category
+ AND Name IN (
+ SELECT
+ *
+ FROM (
+ SELECT
+ Preferences.Name
+ FROM
+ Preferences
+ LEFT JOIN
+ Posts
+ ON
+ Preferences.Name = Posts.Id
+ WHERE
+ Preferences.Category = :Category
+ AND Posts.Id IS null
+ LIMIT
+ :Limit
+ )
+ AS t
+ )`
+
+ sqlResult, err := s.GetMaster().Exec(query, map[string]interface{}{"Category": model.PREFERENCE_CATEGORY_FLAGGED_POST, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.CleanupFlagsBatch", "store.sql_preference.cleanup_flags_batch.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ rowsAffected, err1 := sqlResult.RowsAffected()
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlPreferenceStore.CleanupFlagsBatch", "store.sql_preference.cleanup_flags_batch.app_error", nil, err1.Error(), http.StatusInternalServerError)
+ result.Data = int64(0)
+ } else {
+ result.Data = rowsAffected
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
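
The Postgres branch of save does a count-then-insert-or-update inside the transaction because, as the comment notes, a native upsert only arrives in Postgres 9.5. Purely for comparison, and not something this changeset does (it still has to run on older servers), a 9.5+ ON CONFLICT statement keyed on the (UserId, Category, Name) primary key would collapse that branch into a single statement; the method name below is hypothetical:

package sqlstore

import "github.com/mattermost/mattermost-server/model"

// upsertPreferencePg95 sketches a single-statement alternative to the
// SELECT count(0) / insert-or-update sequence, valid only on Postgres >= 9.5.
func (s SqlPreferenceStore) upsertPreferencePg95(preference *model.Preference) error {
	_, err := s.GetMaster().Exec(
		`INSERT INTO Preferences (UserId, Category, Name, Value)
		 VALUES (:UserId, :Category, :Name, :Value)
		 ON CONFLICT (UserId, Category, Name) DO UPDATE SET Value = :Value`,
		map[string]interface{}{
			"UserId":   preference.UserId,
			"Category": preference.Category,
			"Name":     preference.Name,
			"Value":    preference.Value,
		})
	return err
}
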
diff --git a/store/sqlstore/preference_store_test.go b/store/sqlstore/preference_store_test.go
new file mode 100644
index 000000000..f1cebf379
--- /dev/null
+++ b/store/sqlstore/preference_store_test.go
@@ -0,0 +1,517 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestPreferenceSave(t *testing.T) {
+ ss := Setup()
+
+ id := model.NewId()
+
+ preferences := model.Preferences{
+ {
+ UserId: id,
+ Category: model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW,
+ Name: model.NewId(),
+ Value: "value1a",
+ },
+ {
+ UserId: id,
+ Category: model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW,
+ Name: model.NewId(),
+ Value: "value1b",
+ },
+ }
+ if count := store.Must(ss.Preference().Save(&preferences)); count != 2 {
+ t.Fatal("got incorrect number of rows saved")
+ }
+
+ for _, preference := range preferences {
+ if data := store.Must(ss.Preference().Get(preference.UserId, preference.Category, preference.Name)).(model.Preference); preference != data {
+ t.Fatal("got incorrect preference after first Save")
+ }
+ }
+
+ preferences[0].Value = "value2a"
+ preferences[1].Value = "value2b"
+ if count := store.Must(ss.Preference().Save(&preferences)); count != 2 {
+ t.Fatal("got incorrect number of rows saved")
+ }
+
+ for _, preference := range preferences {
+ if data := store.Must(ss.Preference().Get(preference.UserId, preference.Category, preference.Name)).(model.Preference); preference != data {
+ t.Fatal("got incorrect preference after second Save")
+ }
+ }
+}
+
+func TestPreferenceGet(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ category := model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW
+ name := model.NewId()
+
+ preferences := model.Preferences{
+ {
+ UserId: userId,
+ Category: category,
+ Name: name,
+ },
+ {
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ },
+ {
+ UserId: userId,
+ Category: model.NewId(),
+ Name: name,
+ },
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: name,
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ if result := <-ss.Preference().Get(userId, category, name); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(model.Preference); data != preferences[0] {
+ t.Fatal("got incorrect preference")
+ }
+
+ // make sure getting a missing preference fails
+ if result := <-ss.Preference().Get(model.NewId(), model.NewId(), model.NewId()); result.Err == nil {
+ t.Fatal("no error on getting a missing preference")
+ }
+}
+
+func TestPreferenceGetCategory(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ category := model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW
+ name := model.NewId()
+
+ preferences := model.Preferences{
+ {
+ UserId: userId,
+ Category: category,
+ Name: name,
+ },
+ // same user/category, different name
+ {
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ },
+ // same user/name, different category
+ {
+ UserId: userId,
+ Category: model.NewId(),
+ Name: name,
+ },
+ // same name/category, different user
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: name,
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ if result := <-ss.Preference().GetCategory(userId, category); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(model.Preferences); len(data) != 2 {
+ t.Fatal("got the wrong number of preferences")
+ } else if !((data[0] == preferences[0] && data[1] == preferences[1]) || (data[0] == preferences[1] && data[1] == preferences[0])) {
+ t.Fatal("got incorrect preferences")
+ }
+
+ // make sure getting a missing preference category doesn't fail
+ if result := <-ss.Preference().GetCategory(model.NewId(), model.NewId()); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(model.Preferences); len(data) != 0 {
+ t.Fatal("shouldn't have got any preferences")
+ }
+}
+
+func TestPreferenceGetAll(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ category := model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW
+ name := model.NewId()
+
+ preferences := model.Preferences{
+ {
+ UserId: userId,
+ Category: category,
+ Name: name,
+ },
+ // same user/category, different name
+ {
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ },
+ // same user/name, different category
+ {
+ UserId: userId,
+ Category: model.NewId(),
+ Name: name,
+ },
+ // same name/category, different user
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: name,
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ if result := <-ss.Preference().GetAll(userId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(model.Preferences); len(data) != 3 {
+ t.Fatal("got the wrong number of preferences")
+ } else {
+ for i := 0; i < 3; i++ {
+ if data[0] != preferences[i] && data[1] != preferences[i] && data[2] != preferences[i] {
+ t.Fatal("got incorrect preferences")
+ }
+ }
+ }
+}
+
+func TestPreferenceDeleteByUser(t *testing.T) {
+ ss := Setup()
+
+ userId := model.NewId()
+ category := model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW
+ name := model.NewId()
+
+ preferences := model.Preferences{
+ {
+ UserId: userId,
+ Category: category,
+ Name: name,
+ },
+ // same user/category, different name
+ {
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ },
+ // same user/name, different category
+ {
+ UserId: userId,
+ Category: model.NewId(),
+ Name: name,
+ },
+ // same name/category, different user
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: name,
+ },
+ }
+
+ store.Must(ss.Preference().Save(&preferences))
+
+ if result := <-ss.Preference().PermanentDeleteByUser(userId); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestIsFeatureEnabled(t *testing.T) {
+ ss := Setup()
+
+ feature1 := "testFeat1"
+ feature2 := "testFeat2"
+ feature3 := "testFeat3"
+
+ userId := model.NewId()
+ category := model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS
+
+ features := model.Preferences{
+ {
+ UserId: userId,
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature1,
+ Value: "true",
+ },
+ {
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ Value: "false",
+ },
+ {
+ UserId: userId,
+ Category: model.NewId(),
+ Name: FEATURE_TOGGLE_PREFIX + feature1,
+ Value: "false",
+ },
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature2,
+ Value: "false",
+ },
+ {
+ UserId: model.NewId(),
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature3,
+ Value: "foobar",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&features))
+
+ if result := <-ss.Preference().IsFeatureEnabled(feature1, userId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(bool); data != true {
+ t.Fatalf("got incorrect setting for feature1, %v=%v", true, data)
+ }
+
+ if result := <-ss.Preference().IsFeatureEnabled(feature2, userId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(bool); data != false {
+ t.Fatalf("got incorrect setting for feature2, %v=%v", false, data)
+ }
+
+ // make sure we get false if something different than "true" or "false" has been saved to database
+ if result := <-ss.Preference().IsFeatureEnabled(feature3, userId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(bool); data != false {
+ t.Fatalf("got incorrect setting for feature3, %v=%v", false, data)
+ }
+
+ // make sure false is returned if a non-existent feature is queried
+ if result := <-ss.Preference().IsFeatureEnabled("someOtherFeature", userId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if data := result.Data.(bool); data != false {
+ t.Fatalf("got incorrect setting for non-existent feature 'someOtherFeature', %v=%v", false, data)
+ }
+}
+
+func TestDeleteUnusedFeatures(t *testing.T) {
+ ss := Setup()
+
+ userId1 := model.NewId()
+ userId2 := model.NewId()
+ category := model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS
+ feature1 := "feature1"
+ feature2 := "feature2"
+
+ features := model.Preferences{
+ {
+ UserId: userId1,
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature1,
+ Value: "true",
+ },
+ {
+ UserId: userId2,
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature1,
+ Value: "false",
+ },
+ {
+ UserId: userId1,
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature2,
+ Value: "false",
+ },
+ {
+ UserId: userId2,
+ Category: category,
+ Name: FEATURE_TOGGLE_PREFIX + feature2,
+ Value: "true",
+ },
+ }
+
+ store.Must(ss.Preference().Save(&features))
+
+ ss.Preference().(*SqlPreferenceStore).DeleteUnusedFeatures()
+
+	// make sure features with value "false" have actually been deleted from the database
+ if val, err := ss.Preference().(*SqlPreferenceStore).GetReplica().SelectInt(`SELECT COUNT(*)
+ FROM Preferences
+ WHERE Category = :Category
+ AND Value = :Val
+ AND Name LIKE '`+FEATURE_TOGGLE_PREFIX+`%'`, map[string]interface{}{"Category": model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS, "Val": "false"}); err != nil {
+ t.Fatal(err)
+ } else if val != 0 {
+ t.Fatalf("Found %d features with value 'false', expected all to be deleted", val)
+ }
+
+	// make sure features with value "true" remain saved
+ if val, err := ss.Preference().(*SqlPreferenceStore).GetReplica().SelectInt(`SELECT COUNT(*)
+ FROM Preferences
+ WHERE Category = :Category
+ AND Value = :Val
+ AND Name LIKE '`+FEATURE_TOGGLE_PREFIX+`%'`, map[string]interface{}{"Category": model.PREFERENCE_CATEGORY_ADVANCED_SETTINGS, "Val": "true"}); err != nil {
+ t.Fatal(err)
+ } else if val == 0 {
+ t.Fatalf("Found %d features with value 'true', expected to find at least %d features", val, 2)
+ }
+}
+
+func TestPreferenceDelete(t *testing.T) {
+ ss := Setup()
+
+ preference := model.Preference{
+ UserId: model.NewId(),
+ Category: model.PREFERENCE_CATEGORY_DIRECT_CHANNEL_SHOW,
+ Name: model.NewId(),
+ Value: "value1a",
+ }
+
+ store.Must(ss.Preference().Save(&model.Preferences{preference}))
+
+ if prefs := store.Must(ss.Preference().GetAll(preference.UserId)).(model.Preferences); len([]model.Preference(prefs)) != 1 {
+ t.Fatal("should've returned 1 preference")
+ }
+
+ if result := <-ss.Preference().Delete(preference.UserId, preference.Category, preference.Name); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if prefs := store.Must(ss.Preference().GetAll(preference.UserId)).(model.Preferences); len([]model.Preference(prefs)) != 0 {
+ t.Fatal("should've returned no preferences")
+ }
+}
+
+func TestPreferenceDeleteCategory(t *testing.T) {
+ ss := Setup()
+
+ category := model.NewId()
+ userId := model.NewId()
+
+ preference1 := model.Preference{
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ Value: "value1a",
+ }
+
+ preference2 := model.Preference{
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ Value: "value1a",
+ }
+
+ store.Must(ss.Preference().Save(&model.Preferences{preference1, preference2}))
+
+ if prefs := store.Must(ss.Preference().GetAll(userId)).(model.Preferences); len([]model.Preference(prefs)) != 2 {
+ t.Fatal("should've returned 2 preferences")
+ }
+
+ if result := <-ss.Preference().DeleteCategory(userId, category); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if prefs := store.Must(ss.Preference().GetAll(userId)).(model.Preferences); len([]model.Preference(prefs)) != 0 {
+ t.Fatal("should've returned no preferences")
+ }
+}
+
+func TestPreferenceDeleteCategoryAndName(t *testing.T) {
+ ss := Setup()
+
+ category := model.NewId()
+ name := model.NewId()
+ userId := model.NewId()
+ userId2 := model.NewId()
+
+ preference1 := model.Preference{
+ UserId: userId,
+ Category: category,
+ Name: name,
+ Value: "value1a",
+ }
+
+ preference2 := model.Preference{
+ UserId: userId2,
+ Category: category,
+ Name: name,
+ Value: "value1a",
+ }
+
+ store.Must(ss.Preference().Save(&model.Preferences{preference1, preference2}))
+
+ if prefs := store.Must(ss.Preference().GetAll(userId)).(model.Preferences); len([]model.Preference(prefs)) != 1 {
+ t.Fatal("should've returned 1 preference")
+ }
+
+ if prefs := store.Must(ss.Preference().GetAll(userId2)).(model.Preferences); len([]model.Preference(prefs)) != 1 {
+ t.Fatal("should've returned 1 preference")
+ }
+
+ if result := <-ss.Preference().DeleteCategoryAndName(category, name); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if prefs := store.Must(ss.Preference().GetAll(userId)).(model.Preferences); len([]model.Preference(prefs)) != 0 {
+ t.Fatal("should've returned no preferences")
+ }
+
+ if prefs := store.Must(ss.Preference().GetAll(userId2)).(model.Preferences); len([]model.Preference(prefs)) != 0 {
+ t.Fatal("should've returned no preferences")
+ }
+}
+
+func TestPreferenceCleanupFlagsBatch(t *testing.T) {
+ ss := Setup()
+
+ category := model.PREFERENCE_CATEGORY_FLAGGED_POST
+ userId := model.NewId()
+
+ o1 := &model.Post{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = userId
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1.CreateAt = 1000
+ o1 = (<-ss.Post().Save(o1)).Data.(*model.Post)
+
+ preference1 := model.Preference{
+ UserId: userId,
+ Category: category,
+ Name: o1.Id,
+ Value: "true",
+ }
+
+ preference2 := model.Preference{
+ UserId: userId,
+ Category: category,
+ Name: model.NewId(),
+ Value: "true",
+ }
+
+ store.Must(ss.Preference().Save(&model.Preferences{preference1, preference2}))
+
+ result := <-ss.Preference().CleanupFlagsBatch(10000)
+ assert.Nil(t, result.Err)
+
+ result = <-ss.Preference().Get(userId, category, preference1.Name)
+ assert.Nil(t, result.Err)
+
+ result = <-ss.Preference().Get(userId, category, preference2.Name)
+ assert.NotNil(t, result.Err)
+}
diff --git a/store/sqlstore/reaction_store_test.go b/store/sqlstore/reaction_store_test.go
new file mode 100644
index 000000000..276bcbffa
--- /dev/null
+++ b/store/sqlstore/reaction_store_test.go
@@ -0,0 +1,352 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestReactionSave(t *testing.T) {
+ ss := Setup()
+
+ post := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+ firstUpdateAt := post.UpdateAt
+
+ reaction1 := &model.Reaction{
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: model.NewId(),
+ }
+ if result := <-ss.Reaction().Save(reaction1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if saved := result.Data.(*model.Reaction); saved.UserId != reaction1.UserId ||
+ saved.PostId != reaction1.PostId || saved.EmojiName != reaction1.EmojiName {
+ t.Fatal("should've saved reaction and returned it")
+ }
+
+ var secondUpdateAt int64
+ if postList := store.Must(ss.Post().Get(reaction1.PostId)).(*model.PostList); !postList.Posts[post.Id].HasReactions {
+ t.Fatal("should've set HasReactions = true on post")
+ } else if postList.Posts[post.Id].UpdateAt == firstUpdateAt {
+ t.Fatal("should've marked post as updated when HasReactions changed")
+ } else {
+ secondUpdateAt = postList.Posts[post.Id].UpdateAt
+ }
+
+ if result := <-ss.Reaction().Save(reaction1); result.Err != nil {
+ t.Log(result.Err)
+ t.Fatal("should've allowed saving a duplicate reaction")
+ }
+
+ // different user
+ reaction2 := &model.Reaction{
+ UserId: model.NewId(),
+ PostId: reaction1.PostId,
+ EmojiName: reaction1.EmojiName,
+ }
+ if result := <-ss.Reaction().Save(reaction2); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if postList := store.Must(ss.Post().Get(reaction2.PostId)).(*model.PostList); postList.Posts[post.Id].UpdateAt != secondUpdateAt {
+ t.Fatal("shouldn't mark as updated when HasReactions hasn't changed")
+ }
+
+ // different post
+ reaction3 := &model.Reaction{
+ UserId: reaction1.UserId,
+ PostId: model.NewId(),
+ EmojiName: reaction1.EmojiName,
+ }
+ if result := <-ss.Reaction().Save(reaction3); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ // different emoji
+ reaction4 := &model.Reaction{
+ UserId: reaction1.UserId,
+ PostId: reaction1.PostId,
+ EmojiName: model.NewId(),
+ }
+ if result := <-ss.Reaction().Save(reaction4); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ // invalid reaction
+ reaction5 := &model.Reaction{
+ UserId: reaction1.UserId,
+ PostId: reaction1.PostId,
+ }
+ if result := <-ss.Reaction().Save(reaction5); result.Err == nil {
+ t.Fatal("should've failed for invalid reaction")
+ }
+}
+
+func TestReactionDelete(t *testing.T) {
+ ss := Setup()
+
+ post := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+
+ reaction := &model.Reaction{
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: model.NewId(),
+ }
+
+ store.Must(ss.Reaction().Save(reaction))
+ firstUpdateAt := store.Must(ss.Post().Get(reaction.PostId)).(*model.PostList).Posts[post.Id].UpdateAt
+
+ if result := <-ss.Reaction().Delete(reaction); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.Reaction().GetForPost(post.Id, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if len(result.Data.([]*model.Reaction)) != 0 {
+ t.Fatal("should've deleted reaction")
+ }
+
+ if postList := store.Must(ss.Post().Get(post.Id)).(*model.PostList); postList.Posts[post.Id].HasReactions {
+ t.Fatal("should've set HasReactions = false on post")
+ } else if postList.Posts[post.Id].UpdateAt == firstUpdateAt {
+		t.Fatal("should've marked post as updated when HasReactions changed after deleting reactions")
+ }
+}
+
+func TestReactionGetForPost(t *testing.T) {
+ ss := Setup()
+
+ postId := model.NewId()
+
+ userId := model.NewId()
+
+ reactions := []*model.Reaction{
+ {
+ UserId: userId,
+ PostId: postId,
+ EmojiName: "smile",
+ },
+ {
+ UserId: model.NewId(),
+ PostId: postId,
+ EmojiName: "smile",
+ },
+ {
+ UserId: userId,
+ PostId: postId,
+ EmojiName: "sad",
+ },
+ {
+ UserId: userId,
+ PostId: model.NewId(),
+ EmojiName: "angry",
+ },
+ }
+
+ for _, reaction := range reactions {
+ store.Must(ss.Reaction().Save(reaction))
+ }
+
+ if result := <-ss.Reaction().GetForPost(postId, false); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.([]*model.Reaction); len(returned) != 3 {
+ t.Fatal("should've returned 3 reactions")
+ } else {
+ for _, reaction := range reactions {
+ found := false
+
+ for _, returnedReaction := range returned {
+ if returnedReaction.UserId == reaction.UserId && returnedReaction.PostId == reaction.PostId &&
+ returnedReaction.EmojiName == reaction.EmojiName {
+ found = true
+ break
+ }
+ }
+
+ if !found && reaction.PostId == postId {
+ t.Fatalf("should've returned reaction for post %v", reaction)
+ } else if found && reaction.PostId != postId {
+ t.Fatal("shouldn't have returned reaction for another post")
+ }
+ }
+ }
+
+ // Should return cached item
+ if result := <-ss.Reaction().GetForPost(postId, true); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if returned := result.Data.([]*model.Reaction); len(returned) != 3 {
+ t.Fatal("should've returned 3 reactions")
+ } else {
+ for _, reaction := range reactions {
+ found := false
+
+ for _, returnedReaction := range returned {
+ if returnedReaction.UserId == reaction.UserId && returnedReaction.PostId == reaction.PostId &&
+ returnedReaction.EmojiName == reaction.EmojiName {
+ found = true
+ break
+ }
+ }
+
+ if !found && reaction.PostId == postId {
+ t.Fatalf("should've returned reaction for post %v", reaction)
+ } else if found && reaction.PostId != postId {
+ t.Fatal("shouldn't have returned reaction for another post")
+ }
+ }
+ }
+}
+
+func TestReactionDeleteAllWithEmojiName(t *testing.T) {
+ ss := Setup()
+
+ emojiToDelete := model.NewId()
+
+ post := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+ post2 := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+ post3 := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+
+ userId := model.NewId()
+
+ reactions := []*model.Reaction{
+ {
+ UserId: userId,
+ PostId: post.Id,
+ EmojiName: emojiToDelete,
+ },
+ {
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: emojiToDelete,
+ },
+ {
+ UserId: userId,
+ PostId: post.Id,
+ EmojiName: "sad",
+ },
+ {
+ UserId: userId,
+ PostId: post2.Id,
+ EmojiName: "angry",
+ },
+ {
+ UserId: userId,
+ PostId: post3.Id,
+ EmojiName: emojiToDelete,
+ },
+ }
+
+ for _, reaction := range reactions {
+ store.Must(ss.Reaction().Save(reaction))
+ }
+
+ if result := <-ss.Reaction().DeleteAllWithEmojiName(emojiToDelete); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ // check that the reactions were deleted
+ if returned := store.Must(ss.Reaction().GetForPost(post.Id, false)).([]*model.Reaction); len(returned) != 1 {
+ t.Fatal("should've only removed reactions with emoji name")
+ } else {
+ for _, reaction := range returned {
+			if reaction.EmojiName == emojiToDelete {
+ t.Fatal("should've removed reaction with emoji name")
+ }
+ }
+ }
+
+ if returned := store.Must(ss.Reaction().GetForPost(post2.Id, false)).([]*model.Reaction); len(returned) != 1 {
+ t.Fatal("should've only removed reactions with emoji name")
+ }
+
+ if returned := store.Must(ss.Reaction().GetForPost(post3.Id, false)).([]*model.Reaction); len(returned) != 0 {
+ t.Fatal("should've only removed reactions with emoji name")
+ }
+
+ // check that the posts are updated
+ if postList := store.Must(ss.Post().Get(post.Id)).(*model.PostList); !postList.Posts[post.Id].HasReactions {
+ t.Fatal("post should still have reactions")
+ }
+
+ if postList := store.Must(ss.Post().Get(post2.Id)).(*model.PostList); !postList.Posts[post2.Id].HasReactions {
+ t.Fatal("post should still have reactions")
+ }
+
+ if postList := store.Must(ss.Post().Get(post3.Id)).(*model.PostList); postList.Posts[post3.Id].HasReactions {
+ t.Fatal("post shouldn't have reactions any more")
+ }
+}
+
+func TestReactionStorePermanentDeleteBatch(t *testing.T) {
+ ss := Setup()
+
+ post := store.Must(ss.Post().Save(&model.Post{
+ ChannelId: model.NewId(),
+ UserId: model.NewId(),
+ })).(*model.Post)
+
+ reactions := []*model.Reaction{
+ {
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: "sad",
+ CreateAt: 1000,
+ },
+ {
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: "sad",
+ CreateAt: 1500,
+ },
+ {
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: "sad",
+ CreateAt: 2000,
+ },
+ {
+ UserId: model.NewId(),
+ PostId: post.Id,
+ EmojiName: "sad",
+ CreateAt: 2000,
+ },
+ }
+
+ // Need to hang on to a reaction to delete later in order to clear the cache, as "allowFromCache" isn't honoured any more.
+ var lastReaction *model.Reaction
+ for _, reaction := range reactions {
+ lastReaction = store.Must(ss.Reaction().Save(reaction)).(*model.Reaction)
+ }
+
+ if returned := store.Must(ss.Reaction().GetForPost(post.Id, false)).([]*model.Reaction); len(returned) != 4 {
+ t.Fatal("expected 4 reactions")
+ }
+
+ store.Must(ss.Reaction().PermanentDeleteBatch(1800, 1000))
+
+ // This is to force a clear of the cache.
+ store.Must(ss.Reaction().Delete(lastReaction))
+
+ if returned := store.Must(ss.Reaction().GetForPost(post.Id, false)).([]*model.Reaction); len(returned) != 1 {
+ t.Fatalf("expected 1 reaction. Got: %v", len(returned))
+ }
+}
diff --git a/store/sqlstore/session_store.go b/store/sqlstore/session_store.go
new file mode 100644
index 000000000..09193f595
--- /dev/null
+++ b/store/sqlstore/session_store.go
@@ -0,0 +1,349 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ l4g "github.com/alecthomas/log4go"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlSessionStore struct {
+ SqlStore
+}
+
+func NewSqlSessionStore(sqlStore SqlStore) store.SessionStore {
+ us := &SqlSessionStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Session{}, "Sessions").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Token").SetMaxSize(26)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("DeviceId").SetMaxSize(512)
+ table.ColMap("Roles").SetMaxSize(64)
+ table.ColMap("Props").SetMaxSize(1000)
+ }
+
+ return us
+}
+
+func (me SqlSessionStore) CreateIndexesIfNotExists() {
+ me.CreateIndexIfNotExists("idx_sessions_user_id", "Sessions", "UserId")
+ me.CreateIndexIfNotExists("idx_sessions_token", "Sessions", "Token")
+ me.CreateIndexIfNotExists("idx_sessions_expires_at", "Sessions", "ExpiresAt")
+ me.CreateIndexIfNotExists("idx_sessions_create_at", "Sessions", "CreateAt")
+ me.CreateIndexIfNotExists("idx_sessions_last_activity_at", "Sessions", "LastActivityAt")
+}
+
+func (me SqlSessionStore) Save(session *model.Session) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(session.Id) > 0 {
+ result.Err = model.NewAppError("SqlSessionStore.Save", "store.sql_session.save.existing.app_error", nil, "id="+session.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ session.PreSave()
+
+ if cur := <-me.CleanUpExpiredSessions(session.UserId); cur.Err != nil {
+ l4g.Error(utils.T("store.sql_session.save.cleanup.error"), cur.Err)
+ }
+
+ tcs := me.Team().GetTeamsForUser(session.UserId)
+
+ if err := me.GetMaster().Insert(session); err != nil {
+			result.Err = model.NewAppError("SqlSessionStore.Save", "store.sql_session.save.app_error", nil, "id="+session.Id+", "+err.Error(), http.StatusInternalServerError)
+			storeChannel <- result
+			close(storeChannel)
+			return
+ } else {
+ result.Data = session
+ }
+
+ if rtcs := <-tcs; rtcs.Err != nil {
+			result.Err = model.NewAppError("SqlSessionStore.Save", "store.sql_session.save.app_error", nil, "id="+session.Id+", "+rtcs.Err.Error(), http.StatusInternalServerError)
+			storeChannel <- result
+			close(storeChannel)
+			return
+ } else {
+ tempMembers := rtcs.Data.([]*model.TeamMember)
+ session.TeamMembers = make([]*model.TeamMember, 0, len(tempMembers))
+ for _, tm := range tempMembers {
+ if tm.DeleteAt == 0 {
+ session.TeamMembers = append(session.TeamMembers, tm)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) Get(sessionIdOrToken string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var sessions []*model.Session
+
+ if _, err := me.GetReplica().Select(&sessions, "SELECT * FROM Sessions WHERE Token = :Token OR Id = :Id LIMIT 1", map[string]interface{}{"Token": sessionIdOrToken, "Id": sessionIdOrToken}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.Get", "store.sql_session.get.app_error", nil, "sessionIdOrToken="+sessionIdOrToken+", "+err.Error(), http.StatusInternalServerError)
+ } else if sessions == nil || len(sessions) == 0 {
+ result.Err = model.NewAppError("SqlSessionStore.Get", "store.sql_session.get.app_error", nil, "sessionIdOrToken="+sessionIdOrToken, http.StatusNotFound)
+ } else {
+ result.Data = sessions[0]
+
+ tcs := me.Team().GetTeamsForUser(sessions[0].UserId)
+ if rtcs := <-tcs; rtcs.Err != nil {
+				result.Err = model.NewAppError("SqlSessionStore.Get", "store.sql_session.get.app_error", nil, "sessionIdOrToken="+sessionIdOrToken+", "+rtcs.Err.Error(), http.StatusInternalServerError)
+				storeChannel <- result
+				close(storeChannel)
+				return
+ } else {
+ tempMembers := rtcs.Data.([]*model.TeamMember)
+ sessions[0].TeamMembers = make([]*model.TeamMember, 0, len(tempMembers))
+ for _, tm := range tempMembers {
+ if tm.DeleteAt == 0 {
+ sessions[0].TeamMembers = append(sessions[0].TeamMembers, tm)
+ }
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) GetSessions(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+
+ if cur := <-me.CleanUpExpiredSessions(userId); cur.Err != nil {
+ l4g.Error(utils.T("store.sql_session.get_sessions.error"), cur.Err)
+ }
+
+ result := store.StoreResult{}
+ var sessions []*model.Session
+
+ tcs := me.Team().GetTeamsForUser(userId)
+
+ if _, err := me.GetReplica().Select(&sessions, "SELECT * FROM Sessions WHERE UserId = :UserId ORDER BY LastActivityAt DESC", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.GetSessions", "store.sql_session.get_sessions.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ result.Data = sessions
+ }
+
+ if rtcs := <-tcs; rtcs.Err != nil {
+			result.Err = model.NewAppError("SqlSessionStore.GetSessions", "store.sql_session.get_sessions.app_error", nil, rtcs.Err.Error(), http.StatusInternalServerError)
+			storeChannel <- result
+			close(storeChannel)
+			return
+ } else {
+ for _, session := range sessions {
+ tempMembers := rtcs.Data.([]*model.TeamMember)
+ session.TeamMembers = make([]*model.TeamMember, 0, len(tempMembers))
+ for _, tm := range tempMembers {
+ if tm.DeleteAt == 0 {
+ session.TeamMembers = append(session.TeamMembers, tm)
+ }
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) GetSessionsWithActiveDeviceIds(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+
+ result := store.StoreResult{}
+ var sessions []*model.Session
+
+ if _, err := me.GetReplica().Select(&sessions, "SELECT * FROM Sessions WHERE UserId = :UserId AND ExpiresAt != 0 AND :ExpiresAt <= ExpiresAt AND DeviceId != ''", map[string]interface{}{"UserId": userId, "ExpiresAt": model.GetMillis()}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.GetActiveSessionsWithDeviceIds", "store.sql_session.get_sessions.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ result.Data = sessions
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) Remove(sessionIdOrToken string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+		_, err := me.GetMaster().Exec("DELETE FROM Sessions WHERE Id = :Id OR Token = :Token", map[string]interface{}{"Id": sessionIdOrToken, "Token": sessionIdOrToken})
+ if err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.RemoveSession", "store.sql_session.remove.app_error", nil, "id="+sessionIdOrToken+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) RemoveAllSessions() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := me.GetMaster().Exec("DELETE FROM Sessions")
+ if err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.RemoveAllSessions", "store.sql_session.remove_all_sessions_for_team.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) PermanentDeleteSessionsByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := me.GetMaster().Exec("DELETE FROM Sessions WHERE UserId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.RemoveAllSessionsForUser", "store.sql_session.permanent_delete_sessions_by_user.app_error", nil, "id="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) CleanUpExpiredSessions(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := me.GetMaster().Exec("DELETE FROM Sessions WHERE UserId = :UserId AND ExpiresAt != 0 AND :ExpiresAt > ExpiresAt", map[string]interface{}{"UserId": userId, "ExpiresAt": model.GetMillis()}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.CleanUpExpiredSessions", "store.sql_session.cleanup_expired_sessions.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) UpdateLastActivityAt(sessionId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := me.GetMaster().Exec("UPDATE Sessions SET LastActivityAt = :LastActivityAt WHERE Id = :Id", map[string]interface{}{"LastActivityAt": time, "Id": sessionId}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.UpdateLastActivityAt", "store.sql_session.update_last_activity.app_error", nil, "sessionId="+sessionId, http.StatusInternalServerError)
+ } else {
+ result.Data = sessionId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) UpdateRoles(userId, roles string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ if _, err := me.GetMaster().Exec("UPDATE Sessions SET Roles = :Roles WHERE UserId = :UserId", map[string]interface{}{"Roles": roles, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.UpdateRoles", "store.sql_session.update_roles.app_error", nil, "userId="+userId, http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) UpdateDeviceId(id string, deviceId string, expiresAt int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ if _, err := me.GetMaster().Exec("UPDATE Sessions SET DeviceId = :DeviceId, ExpiresAt = :ExpiresAt WHERE Id = :Id", map[string]interface{}{"DeviceId": deviceId, "Id": id, "ExpiresAt": expiresAt}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.UpdateDeviceId", "store.sql_session.update_device_id.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = deviceId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (me SqlSessionStore) AnalyticsSessionCount() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ COUNT(*)
+ FROM
+ Sessions
+ WHERE ExpiresAt > :Time`
+
+ if c, err := me.GetReplica().SelectInt(query, map[string]interface{}{"Time": model.GetMillis()}); err != nil {
+ result.Err = model.NewAppError("SqlSessionStore.AnalyticsSessionCount", "store.sql_session.analytics_session_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = c
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
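Every method above follows the same asynchronous contract: it returns a buffered StoreChannel of size one, and the goroutine sends exactly one StoreResult before closing it. A minimal caller sketch follows; it is hypothetical code (assuming model.Session.PreSave populates Id and Token, as it does in this codebase), not part of this change.

// Hypothetical caller sketch for the SessionStore above.
func createAndFetchSession(s store.Store, userId string) (*model.Session, *model.AppError) {
	session := &model.Session{UserId: userId}

	if result := <-s.Session().Save(session); result.Err != nil {
		return nil, result.Err
	}

	// Get accepts either a session id or a token.
	result := <-s.Session().Get(session.Token)
	if result.Err != nil {
		return nil, result.Err
	}
	return result.Data.(*model.Session), nil
}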
diff --git a/store/sqlstore/session_store_test.go b/store/sqlstore/session_store_test.go
new file mode 100644
index 000000000..ab6ffec95
--- /dev/null
+++ b/store/sqlstore/session_store_test.go
@@ -0,0 +1,258 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestSessionStoreSave(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+
+ if err := (<-ss.Session().Save(&s1)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestSessionGet(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ s2 := model.Session{}
+ s2.UserId = s1.UserId
+ store.Must(ss.Session().Save(&s2))
+
+ s3 := model.Session{}
+ s3.UserId = s1.UserId
+ s3.ExpiresAt = 1
+ store.Must(ss.Session().Save(&s3))
+
+ if rs1 := (<-ss.Session().Get(s1.Id)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if rs1.Data.(*model.Session).Id != s1.Id {
+ t.Fatal("should match")
+ }
+ }
+
+ if rs2 := (<-ss.Session().GetSessions(s1.UserId)); rs2.Err != nil {
+ t.Fatal(rs2.Err)
+ } else {
+ if len(rs2.Data.([]*model.Session)) != 2 {
+ t.Fatal("should match len")
+ }
+ }
+}
+
+func TestSessionGetWithDeviceId(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ s1.ExpiresAt = model.GetMillis() + 10000
+ store.Must(ss.Session().Save(&s1))
+
+ s2 := model.Session{}
+ s2.UserId = s1.UserId
+ s2.DeviceId = model.NewId()
+ s2.ExpiresAt = model.GetMillis() + 10000
+ store.Must(ss.Session().Save(&s2))
+
+ s3 := model.Session{}
+ s3.UserId = s1.UserId
+ s3.ExpiresAt = 1
+ s3.DeviceId = model.NewId()
+ store.Must(ss.Session().Save(&s3))
+
+ if rs1 := (<-ss.Session().GetSessionsWithActiveDeviceIds(s1.UserId)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if len(rs1.Data.([]*model.Session)) != 1 {
+ t.Fatal("should match len")
+ }
+ }
+}
+
+func TestSessionRemove(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().Get(s1.Id)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if rs1.Data.(*model.Session).Id != s1.Id {
+ t.Fatal("should match")
+ }
+ }
+
+ store.Must(ss.Session().Remove(s1.Id))
+
+ if rs2 := (<-ss.Session().Get(s1.Id)); rs2.Err == nil {
+ t.Fatal("should have been removed")
+ }
+}
+
+func TestSessionRemoveAll(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().Get(s1.Id)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if rs1.Data.(*model.Session).Id != s1.Id {
+ t.Fatal("should match")
+ }
+ }
+
+ store.Must(ss.Session().RemoveAllSessions())
+
+ if rs2 := (<-ss.Session().Get(s1.Id)); rs2.Err == nil {
+ t.Fatal("should have been removed")
+ }
+}
+
+func TestSessionRemoveByUser(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().Get(s1.Id)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if rs1.Data.(*model.Session).Id != s1.Id {
+ t.Fatal("should match")
+ }
+ }
+
+ store.Must(ss.Session().PermanentDeleteSessionsByUser(s1.UserId))
+
+ if rs2 := (<-ss.Session().Get(s1.Id)); rs2.Err == nil {
+ t.Fatal("should have been removed")
+ }
+}
+
+func TestSessionRemoveToken(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().Get(s1.Id)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ } else {
+ if rs1.Data.(*model.Session).Id != s1.Id {
+ t.Fatal("should match")
+ }
+ }
+
+ store.Must(ss.Session().Remove(s1.Token))
+
+ if rs2 := (<-ss.Session().Get(s1.Id)); rs2.Err == nil {
+ t.Fatal("should have been removed")
+ }
+
+ if rs3 := (<-ss.Session().GetSessions(s1.UserId)); rs3.Err != nil {
+ t.Fatal(rs3.Err)
+ } else {
+ if len(rs3.Data.([]*model.Session)) != 0 {
+ t.Fatal("should match len")
+ }
+ }
+}
+
+func TestSessionUpdateDeviceId(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().UpdateDeviceId(s1.Id, model.PUSH_NOTIFY_APPLE+":1234567890", s1.ExpiresAt)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ }
+
+ s2 := model.Session{}
+ s2.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s2))
+
+ if rs2 := (<-ss.Session().UpdateDeviceId(s2.Id, model.PUSH_NOTIFY_APPLE+":1234567890", s1.ExpiresAt)); rs2.Err != nil {
+ t.Fatal(rs2.Err)
+ }
+}
+
+func TestSessionUpdateDeviceId2(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if rs1 := (<-ss.Session().UpdateDeviceId(s1.Id, model.PUSH_NOTIFY_APPLE_REACT_NATIVE+":1234567890", s1.ExpiresAt)); rs1.Err != nil {
+ t.Fatal(rs1.Err)
+ }
+
+ s2 := model.Session{}
+ s2.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s2))
+
+ if rs2 := (<-ss.Session().UpdateDeviceId(s2.Id, model.PUSH_NOTIFY_APPLE_REACT_NATIVE+":1234567890", s1.ExpiresAt)); rs2.Err != nil {
+ t.Fatal(rs2.Err)
+ }
+}
+
+func TestSessionStoreUpdateLastActivityAt(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ store.Must(ss.Session().Save(&s1))
+
+ if err := (<-ss.Session().UpdateLastActivityAt(s1.Id, 1234567890)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.Session().Get(s1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Session).LastActivityAt != 1234567890 {
+ t.Fatal("LastActivityAt not updated correctly")
+ }
+ }
+
+}
+
+func TestSessionCount(t *testing.T) {
+ ss := Setup()
+
+ s1 := model.Session{}
+ s1.UserId = model.NewId()
+ s1.ExpiresAt = model.GetMillis() + 100000
+ store.Must(ss.Session().Save(&s1))
+
+ if r1 := <-ss.Session().AnalyticsSessionCount(); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) == 0 {
+ t.Fatal("should have at least 1 session")
+ }
+ }
+}
diff --git a/store/sqlstore/status_store.go b/store/sqlstore/status_store.go
new file mode 100644
index 000000000..43dab0c34
--- /dev/null
+++ b/store/sqlstore/status_store.go
@@ -0,0 +1,245 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+ "strconv"
+ "strings"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+const (
+ MISSING_STATUS_ERROR = "store.sql_status.get.missing.app_error"
+)
+
+type SqlStatusStore struct {
+ SqlStore
+}
+
+func NewSqlStatusStore(sqlStore SqlStore) store.StatusStore {
+ s := &SqlStatusStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Status{}, "Status").SetKeys(false, "UserId")
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("Status").SetMaxSize(32)
+ table.ColMap("ActiveChannel").SetMaxSize(26)
+ }
+
+ return s
+}
+
+func (s SqlStatusStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_status_user_id", "Status", "UserId")
+ s.CreateIndexIfNotExists("idx_status_status", "Status", "Status")
+}
+
+func (s SqlStatusStore) SaveOrUpdate(status *model.Status) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if err := s.GetReplica().SelectOne(&model.Status{}, "SELECT * FROM Status WHERE UserId = :UserId", map[string]interface{}{"UserId": status.UserId}); err == nil {
+ if _, err := s.GetMaster().Update(status); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.SaveOrUpdate", "store.sql_status.update.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ if err := s.GetMaster().Insert(status); err != nil {
+ if !(strings.Contains(err.Error(), "for key 'PRIMARY'") && strings.Contains(err.Error(), "Duplicate entry")) {
+ result.Err = model.NewAppError("SqlStatusStore.SaveOrUpdate", "store.sql_status.save.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) Get(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var status model.Status
+
+ if err := s.GetReplica().SelectOne(&status,
+ `SELECT
+ *
+ FROM
+ Status
+ WHERE
+ UserId = :UserId`, map[string]interface{}{"UserId": userId}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlStatusStore.Get", MISSING_STATUS_ERROR, nil, err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlStatusStore.Get", "store.sql_status.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = &status
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) GetByIds(userIds []string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ props := make(map[string]interface{})
+ idQuery := ""
+
+ for index, userId := range userIds {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["userId"+strconv.Itoa(index)] = userId
+ idQuery += ":userId" + strconv.Itoa(index)
+ }
+
+ var statuses []*model.Status
+ if _, err := s.GetReplica().Select(&statuses, "SELECT * FROM Status WHERE UserId IN ("+idQuery+")", props); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.GetByIds", "store.sql_status.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) GetOnlineAway() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Status
+ if _, err := s.GetReplica().Select(&statuses, "SELECT * FROM Status WHERE Status = :Online OR Status = :Away LIMIT 300", map[string]interface{}{"Online": model.STATUS_ONLINE, "Away": model.STATUS_AWAY}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.GetOnlineAway", "store.sql_status.get_online_away.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) GetOnline() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Status
+ if _, err := s.GetReplica().Select(&statuses, "SELECT * FROM Status WHERE Status = :Online", map[string]interface{}{"Online": model.STATUS_ONLINE}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.GetOnline", "store.sql_status.get_online.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) GetAllFromTeam(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var statuses []*model.Status
+ if _, err := s.GetReplica().Select(&statuses,
+ `SELECT s.* FROM Status AS s INNER JOIN
+ TeamMembers AS tm ON tm.TeamId=:TeamId AND s.UserId=tm.UserId`, map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.GetAllFromTeam", "store.sql_status.get_team_statuses.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) ResetAll() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("UPDATE Status SET Status = :Status WHERE Manual = false", map[string]interface{}{"Status": model.STATUS_OFFLINE}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.ResetAll", "store.sql_status.reset_all.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) GetTotalActiveUsersCount() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ time := model.GetMillis() - (1000 * 60 * 60 * 24)
+
+ if count, err := s.GetReplica().SelectInt("SELECT COUNT(UserId) FROM Status WHERE LastActivityAt > :Time", map[string]interface{}{"Time": time}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.GetTotalActiveUsersCount", "store.sql_status.get_total_active_users_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlStatusStore) UpdateLastActivityAt(userId string, lastActivityAt int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("UPDATE Status SET LastActivityAt = :Time WHERE UserId = :UserId", map[string]interface{}{"UserId": userId, "Time": lastActivityAt}); err != nil {
+ result.Err = model.NewAppError("SqlStatusStore.UpdateLastActivityAt", "store.sql_status.update_last_activity_at.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
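GetByIds above binds one named parameter per user id rather than concatenating values into the SQL string. The same technique in isolation, as a hypothetical helper that relies only on strconv and strings (both already imported in this file); it is illustrative and not part of this change.

// Hypothetical helper mirroring the placeholder-building pattern in GetByIds:
// one ":userIdN" placeholder and one map entry per value.
func buildUserIdInClause(userIds []string) (string, map[string]interface{}) {
	props := make(map[string]interface{})
	placeholders := make([]string, 0, len(userIds))

	for i, id := range userIds {
		key := "userId" + strconv.Itoa(i)
		props[key] = id
		placeholders = append(placeholders, ":"+key)
	}

	return strings.Join(placeholders, ", "), props
}

A caller would then run something like s.GetReplica().Select(&statuses, "SELECT * FROM Status WHERE UserId IN ("+clause+")", props), exactly as GetByIds does.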
diff --git a/store/sqlstore/status_store_test.go b/store/sqlstore/status_store_test.go
new file mode 100644
index 000000000..3f3a99837
--- /dev/null
+++ b/store/sqlstore/status_store_test.go
@@ -0,0 +1,105 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestSqlStatusStore(t *testing.T) {
+ ss := Setup()
+
+ status := &model.Status{UserId: model.NewId(), Status: model.STATUS_ONLINE, Manual: false, LastActivityAt: 0, ActiveChannel: ""}
+
+ if err := (<-ss.Status().SaveOrUpdate(status)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ status.LastActivityAt = 10
+
+ if err := (<-ss.Status().SaveOrUpdate(status)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.Status().Get(status.UserId)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ status2 := &model.Status{UserId: model.NewId(), Status: model.STATUS_AWAY, Manual: false, LastActivityAt: 0, ActiveChannel: ""}
+ if err := (<-ss.Status().SaveOrUpdate(status2)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ status3 := &model.Status{UserId: model.NewId(), Status: model.STATUS_OFFLINE, Manual: false, LastActivityAt: 0, ActiveChannel: ""}
+ if err := (<-ss.Status().SaveOrUpdate(status3)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if result := <-ss.Status().GetOnlineAway(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ statuses := result.Data.([]*model.Status)
+ for _, status := range statuses {
+ if status.Status == model.STATUS_OFFLINE {
+ t.Fatal("should not have returned offline statuses")
+ }
+ }
+ }
+
+ if result := <-ss.Status().GetOnline(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ statuses := result.Data.([]*model.Status)
+ for _, status := range statuses {
+ if status.Status != model.STATUS_ONLINE {
+ t.Fatal("should not have returned offline statuses")
+ }
+ }
+ }
+
+ if result := <-ss.Status().GetByIds([]string{status.UserId, "junk"}); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ statuses := result.Data.([]*model.Status)
+ if len(statuses) != 1 {
+ t.Fatal("should only have 1 status")
+ }
+ }
+
+ if err := (<-ss.Status().ResetAll()).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if result := <-ss.Status().Get(status.UserId); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ status := result.Data.(*model.Status)
+ if status.Status != model.STATUS_OFFLINE {
+ t.Fatal("should be offline")
+ }
+ }
+
+ if result := <-ss.Status().UpdateLastActivityAt(status.UserId, 10); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
+
+func TestActiveUserCount(t *testing.T) {
+ ss := Setup()
+
+ status := &model.Status{UserId: model.NewId(), Status: model.STATUS_ONLINE, Manual: false, LastActivityAt: model.GetMillis(), ActiveChannel: ""}
+ store.Must(ss.Status().SaveOrUpdate(status))
+
+ if result := <-ss.Status().GetTotalActiveUsersCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ count := result.Data.(int64)
+ if count <= 0 {
+ t.Fatal()
+ }
+ }
+}
diff --git a/store/sqlstore/store.go b/store/sqlstore/store.go
new file mode 100644
index 000000000..02fcaa1cb
--- /dev/null
+++ b/store/sqlstore/store.go
@@ -0,0 +1,87 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ _ "github.com/go-sql-driver/mysql"
+ _ "github.com/lib/pq"
+ "github.com/mattermost/gorp"
+
+ "github.com/mattermost/mattermost-server/store"
+)
+
+/*type SqlStore struct {
+ master *gorp.DbMap
+ replicas []*gorp.DbMap
+ searchReplicas []*gorp.DbMap
+ team TeamStore
+ channel ChannelStore
+ post PostStore
+ user UserStore
+ audit AuditStore
+ compliance ComplianceStore
+ session SessionStore
+ oauth OAuthStore
+ system SystemStore
+ webhook WebhookStore
+ command CommandStore
+ preference PreferenceStore
+ license LicenseStore
+ token TokenStore
+ emoji EmojiStore
+ status StatusStore
+ fileInfo FileInfoStore
+ reaction ReactionStore
+ jobStatus JobStatusStore
+ SchemaVersion string
+ rrCounter int64
+ srCounter int64
+}*/
+
+type SqlStore interface {
+ GetCurrentSchemaVersion() string
+ GetMaster() *gorp.DbMap
+ GetSearchReplica() *gorp.DbMap
+ GetReplica() *gorp.DbMap
+ TotalMasterDbConnections() int
+ TotalReadDbConnections() int
+ TotalSearchDbConnections() int
+ MarkSystemRanUnitTests()
+ DoesTableExist(tablename string) bool
+	DoesColumnExist(tableName string, columnName string) bool
+ CreateColumnIfNotExists(tableName string, columnName string, mySqlColType string, postgresColType string, defaultValue string) bool
+ RemoveColumnIfExists(tableName string, columnName string) bool
+ RemoveTableIfExists(tableName string) bool
+ RenameColumnIfExists(tableName string, oldColumnName string, newColumnName string, colType string) bool
+ GetMaxLengthOfColumnIfExists(tableName string, columnName string) string
+ AlterColumnTypeIfExists(tableName string, columnName string, mySqlColType string, postgresColType string) bool
+ CreateUniqueIndexIfNotExists(indexName string, tableName string, columnName string) bool
+ CreateIndexIfNotExists(indexName string, tableName string, columnName string) bool
+ CreateFullTextIndexIfNotExists(indexName string, tableName string, columnName string) bool
+ RemoveIndexIfExists(indexName string, tableName string) bool
+ GetAllConns() []*gorp.DbMap
+ Close()
+ Team() store.TeamStore
+ Channel() store.ChannelStore
+ Post() store.PostStore
+ User() store.UserStore
+ Audit() store.AuditStore
+ ClusterDiscovery() store.ClusterDiscoveryStore
+ Compliance() store.ComplianceStore
+ Session() store.SessionStore
+ OAuth() store.OAuthStore
+ System() store.SystemStore
+ Webhook() store.WebhookStore
+ Command() store.CommandStore
+ CommandWebhook() store.CommandWebhookStore
+ Preference() store.PreferenceStore
+ License() store.LicenseStore
+ Token() store.TokenStore
+ Emoji() store.EmojiStore
+ Status() store.StatusStore
+ FileInfo() store.FileInfoStore
+ Reaction() store.ReactionStore
+ Job() store.JobStore
+ UserAccessToken() store.UserAccessTokenStore
+}
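Every concrete store in this package is built against this interface in the same way: the constructor embeds SqlStore, registers its table with gorp on each configured connection, and CreateIndexesIfNotExists adds the indexes. A condensed sketch of that convention is below; the Thing model and Things table are hypothetical and serve only to show the shape.

// Condensed sketch of the per-store convention used throughout this package.
type SqlThingStore struct {
	SqlStore
}

func NewSqlThingStore(sqlStore SqlStore) *SqlThingStore {
	s := &SqlThingStore{sqlStore}

	// Register the model with gorp on every configured connection.
	for _, db := range sqlStore.GetAllConns() {
		table := db.AddTableWithName(model.Thing{}, "Things").SetKeys(false, "Id")
		table.ColMap("Id").SetMaxSize(26)
	}

	return s
}

func (s SqlThingStore) CreateIndexesIfNotExists() {
	s.CreateIndexIfNotExists("idx_things_update_at", "Things", "UpdateAt")
}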
diff --git a/store/sqlstore/store_test.go b/store/sqlstore/store_test.go
new file mode 100644
index 000000000..0f306a475
--- /dev/null
+++ b/store/sqlstore/store_test.go
@@ -0,0 +1,151 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+var sqlStore store.Store
+
+func Setup() store.Store {
+ if sqlStore == nil {
+ utils.TranslationsPreInit()
+ utils.LoadConfig("config.json")
+ utils.InitTranslations(utils.Cfg.LocalizationSettings)
+ sqlStore = store.NewLayeredStore(NewSqlSupplier(nil), nil, nil)
+
+ sqlStore.MarkSystemRanUnitTests()
+ }
+ return sqlStore
+}
+
+/*
+func TestSqlStore1(t *testing.T) {
+ utils.TranslationsPreInit()
+ utils.LoadConfig("config.json")
+ utils.Cfg.SqlSettings.Trace = true
+
+ store := NewSqlStore()
+ ss.Close()
+
+ utils.Cfg.SqlSettings.DataSourceReplicas = []string{utils.Cfg.SqlSettings.DataSource}
+
+ store = NewSqlStore()
+ ss.TotalMasterDbConnections()
+ ss.TotalReadDbConnections()
+ ss.Close()
+
+ utils.LoadConfig("config.json")
+}
+
+func TestAlertDbCmds(t *testing.T) {
+ ss := Setup()
+
+ sqlStore := store.(SqlStore)
+
+ if !sqlStore.DoesTableExist("Systems") {
+ t.Fatal("Failed table exists")
+ }
+
+ if sqlStore.DoesColumnExist("Systems", "Test") {
+ t.Fatal("Column should not exist")
+ }
+
+ if !sqlStore.CreateColumnIfNotExists("Systems", "Test", "VARCHAR(50)", "VARCHAR(50)", "") {
+ t.Fatal("Failed to create column")
+ }
+
+ maxLen := sqlStore.GetMaxLengthOfColumnIfExists("Systems", "Test")
+
+ if maxLen != "50" {
+ t.Fatal("Failed to get max length found " + maxLen)
+ }
+
+ if !sqlStore.AlterColumnTypeIfExists("Systems", "Test", "VARCHAR(25)", "VARCHAR(25)") {
+ t.Fatal("failed to alter column size")
+ }
+
+ maxLen2 := sqlStore.GetMaxLengthOfColumnIfExists("Systems", "Test")
+
+ if maxLen2 != "25" {
+ t.Fatal("Failed to get max length")
+ }
+
+ if !sqlStore.RenameColumnIfExists("Systems", "Test", "Test1", "VARCHAR(25)") {
+ t.Fatal("Failed to rename column")
+ }
+
+ if sqlStore.DoesColumnExist("Systems", "Test") {
+ t.Fatal("Column should not exist")
+ }
+
+ if !sqlStore.DoesColumnExist("Systems", "Test1") {
+ t.Fatal("Column should exist")
+ }
+
+ sqlStore.CreateIndexIfNotExists("idx_systems_test1", "Systems", "Test1")
+ sqlStore.RemoveIndexIfExists("idx_systems_test1", "Systems")
+
+ sqlStore.CreateFullTextIndexIfNotExists("idx_systems_test1", "Systems", "Test1")
+ sqlStore.RemoveIndexIfExists("idx_systems_test1", "Systems")
+
+ if !sqlStore.RemoveColumnIfExists("Systems", "Test1") {
+ t.Fatal("Failed to remove columns")
+ }
+
+ if sqlStore.DoesColumnExist("Systems", "Test1") {
+ t.Fatal("Column should not exist")
+ }
+}
+
+func TestCreateIndexIfNotExists(t *testing.T) {
+ ss := Setup()
+
+ sqlStore := ss.(SqlStore)
+
+ defer sqlStore.RemoveColumnIfExists("Systems", "Test")
+ if !sqlStore.CreateColumnIfNotExists("Systems", "Test", "VARCHAR(50)", "VARCHAR(50)", "") {
+ t.Fatal("Failed to create test column")
+ }
+
+ defer sqlStore.RemoveIndexIfExists("idx_systems_create_index_test", "Systems")
+ if !sqlStore.CreateIndexIfNotExists("idx_systems_create_index_test", "Systems", "Test") {
+ t.Fatal("Should've created test index")
+ }
+
+ if sqlStore.CreateIndexIfNotExists("idx_systems_create_index_test", "Systems", "Test") {
+ t.Fatal("Shouldn't have created index that already exists")
+ }
+}
+
+func TestRemoveIndexIfExists(t *testing.T) {
+ ss := Setup()
+
+ sqlStore := ss.(SqlStore)
+
+ defer sqlStore.RemoveColumnIfExists("Systems", "Test")
+ if !sqlStore.CreateColumnIfNotExists("Systems", "Test", "VARCHAR(50)", "VARCHAR(50)", "") {
+ t.Fatal("Failed to create test column")
+ }
+
+ if sqlStore.RemoveIndexIfExists("idx_systems_remove_index_test", "Systems") {
+ t.Fatal("Should've failed to remove index that doesn't exist")
+ }
+
+ defer sqlStore.RemoveIndexIfExists("idx_systems_remove_index_test", "Systems")
+ if !sqlStore.CreateIndexIfNotExists("idx_systems_remove_index_test", "Systems", "Test") {
+ t.Fatal("Should've created test index")
+ }
+
+ if !sqlStore.RemoveIndexIfExists("idx_systems_remove_index_test", "Systems") {
+ t.Fatal("Should've removed index that exists")
+ }
+
+ if sqlStore.RemoveIndexIfExists("idx_systems_remove_index_test", "Systems") {
+ t.Fatal("Should've failed to remove index that was already removed")
+ }
+}
+*/
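The commented-out tests above appear to predate the supplier refactor (they still reference NewSqlStore); the live tests in this package go through the Setup() helper and the channel-based store API instead. A minimal sketch of that pattern, assuming the same imports as the test files later in this diff (testing, model, store) — the test name and system value are illustrative only:

	// Sketch of a store test built on the shared Setup() helper.
	func TestExampleSetupUsage(t *testing.T) {
		ss := Setup() // package-wide store.Store, initialized once

		system := &model.System{Name: model.NewId(), Value: "example"}
		store.Must(ss.System().Save(system)) // Must fails hard if the store returns an error

		// Every store call returns a StoreChannel; receive once to get the result.
		if r := <-ss.System().GetByName(system.Name); r.Err != nil {
			t.Fatal(r.Err)
		}
	}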
diff --git a/store/sqlstore/supplier.go b/store/sqlstore/supplier.go
new file mode 100644
index 000000000..7d10fef36
--- /dev/null
+++ b/store/sqlstore/supplier.go
@@ -0,0 +1,884 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "context"
+ dbsql "database/sql"
+ "encoding/json"
+ "errors"
+ "fmt"
+ sqltrace "log"
+ "os"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/go-sql-driver/mysql"
+ "github.com/lib/pq"
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ INDEX_TYPE_FULL_TEXT = "full_text"
+ INDEX_TYPE_DEFAULT = "default"
+ MAX_DB_CONN_LIFETIME = 60
+ DB_PING_ATTEMPTS = 18
+ DB_PING_TIMEOUT_SECS = 10
+)
+
+const (
+ EXIT_CREATE_TABLE = 100
+ EXIT_DB_OPEN = 101
+ EXIT_PING = 102
+ EXIT_NO_DRIVER = 103
+ EXIT_TABLE_EXISTS = 104
+ EXIT_TABLE_EXISTS_MYSQL = 105
+ EXIT_COLUMN_EXISTS = 106
+ EXIT_DOES_COLUMN_EXISTS_POSTGRES = 107
+ EXIT_DOES_COLUMN_EXISTS_MYSQL = 108
+ EXIT_DOES_COLUMN_EXISTS_MISSING = 109
+ EXIT_CREATE_COLUMN_POSTGRES = 110
+ EXIT_CREATE_COLUMN_MYSQL = 111
+ EXIT_CREATE_COLUMN_MISSING = 112
+ EXIT_REMOVE_COLUMN = 113
+ EXIT_RENAME_COLUMN = 114
+ EXIT_MAX_COLUMN = 115
+ EXIT_ALTER_COLUMN = 116
+ EXIT_CREATE_INDEX_POSTGRES = 117
+ EXIT_CREATE_INDEX_MYSQL = 118
+ EXIT_CREATE_INDEX_FULL_MYSQL = 119
+ EXIT_CREATE_INDEX_MISSING = 120
+ EXIT_REMOVE_INDEX_POSTGRES = 121
+ EXIT_REMOVE_INDEX_MYSQL = 122
+ EXIT_REMOVE_INDEX_MISSING = 123
+ EXIT_REMOVE_TABLE = 134
+)
+
+type SqlSupplierOldStores struct {
+ team store.TeamStore
+ channel store.ChannelStore
+ post store.PostStore
+ user store.UserStore
+ audit store.AuditStore
+ cluster store.ClusterDiscoveryStore
+ compliance store.ComplianceStore
+ session store.SessionStore
+ oauth store.OAuthStore
+ system store.SystemStore
+ webhook store.WebhookStore
+ command store.CommandStore
+ commandWebhook store.CommandWebhookStore
+ preference store.PreferenceStore
+ license store.LicenseStore
+ token store.TokenStore
+ emoji store.EmojiStore
+ status store.StatusStore
+ fileInfo store.FileInfoStore
+ reaction store.ReactionStore
+ job store.JobStore
+ userAccessToken store.UserAccessTokenStore
+}
+
+type SqlSupplier struct {
+ // rrCounter and srCounter should be kept first.
+ // See https://github.com/mattermost/mattermost-server/pull/7281
+ rrCounter int64
+ srCounter int64
+ next store.LayeredStoreSupplier
+ master *gorp.DbMap
+ replicas []*gorp.DbMap
+ searchReplicas []*gorp.DbMap
+ oldStores SqlSupplierOldStores
+}
+
+func NewSqlSupplier(metrics einterfaces.MetricsInterface) *SqlSupplier {
+ supplier := &SqlSupplier{
+ rrCounter: 0,
+ srCounter: 0,
+ }
+
+ supplier.initConnection()
+
+ supplier.oldStores.team = NewSqlTeamStore(supplier)
+ supplier.oldStores.channel = NewSqlChannelStore(supplier, metrics)
+ supplier.oldStores.post = NewSqlPostStore(supplier, metrics)
+ supplier.oldStores.user = NewSqlUserStore(supplier, metrics)
+ supplier.oldStores.audit = NewSqlAuditStore(supplier)
+ supplier.oldStores.cluster = NewSqlClusterDiscoveryStore(supplier)
+ supplier.oldStores.compliance = NewSqlComplianceStore(supplier)
+ supplier.oldStores.session = NewSqlSessionStore(supplier)
+ supplier.oldStores.oauth = NewSqlOAuthStore(supplier)
+ supplier.oldStores.system = NewSqlSystemStore(supplier)
+ supplier.oldStores.webhook = NewSqlWebhookStore(supplier, metrics)
+ supplier.oldStores.command = NewSqlCommandStore(supplier)
+ supplier.oldStores.commandWebhook = NewSqlCommandWebhookStore(supplier)
+ supplier.oldStores.preference = NewSqlPreferenceStore(supplier)
+ supplier.oldStores.license = NewSqlLicenseStore(supplier)
+ supplier.oldStores.token = NewSqlTokenStore(supplier)
+ supplier.oldStores.emoji = NewSqlEmojiStore(supplier, metrics)
+ supplier.oldStores.status = NewSqlStatusStore(supplier)
+ supplier.oldStores.fileInfo = NewSqlFileInfoStore(supplier, metrics)
+ supplier.oldStores.job = NewSqlJobStore(supplier)
+ supplier.oldStores.userAccessToken = NewSqlUserAccessTokenStore(supplier)
+
+ initSqlSupplierReactions(supplier)
+
+ err := supplier.GetMaster().CreateTablesIfNotExists()
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.creating_tables.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_TABLE)
+ }
+
+ UpgradeDatabase(supplier)
+
+ supplier.oldStores.team.(*SqlTeamStore).CreateIndexesIfNotExists()
+ supplier.oldStores.channel.(*SqlChannelStore).CreateIndexesIfNotExists()
+ supplier.oldStores.post.(*SqlPostStore).CreateIndexesIfNotExists()
+ supplier.oldStores.user.(*SqlUserStore).CreateIndexesIfNotExists()
+ supplier.oldStores.audit.(*SqlAuditStore).CreateIndexesIfNotExists()
+ supplier.oldStores.compliance.(*SqlComplianceStore).CreateIndexesIfNotExists()
+ supplier.oldStores.session.(*SqlSessionStore).CreateIndexesIfNotExists()
+ supplier.oldStores.oauth.(*SqlOAuthStore).CreateIndexesIfNotExists()
+ supplier.oldStores.system.(*SqlSystemStore).CreateIndexesIfNotExists()
+ supplier.oldStores.webhook.(*SqlWebhookStore).CreateIndexesIfNotExists()
+ supplier.oldStores.command.(*SqlCommandStore).CreateIndexesIfNotExists()
+ supplier.oldStores.commandWebhook.(*SqlCommandWebhookStore).CreateIndexesIfNotExists()
+ supplier.oldStores.preference.(*SqlPreferenceStore).CreateIndexesIfNotExists()
+ supplier.oldStores.license.(*SqlLicenseStore).CreateIndexesIfNotExists()
+ supplier.oldStores.token.(*SqlTokenStore).CreateIndexesIfNotExists()
+ supplier.oldStores.emoji.(*SqlEmojiStore).CreateIndexesIfNotExists()
+ supplier.oldStores.status.(*SqlStatusStore).CreateIndexesIfNotExists()
+ supplier.oldStores.fileInfo.(*SqlFileInfoStore).CreateIndexesIfNotExists()
+ supplier.oldStores.job.(*SqlJobStore).CreateIndexesIfNotExists()
+ supplier.oldStores.userAccessToken.(*SqlUserAccessTokenStore).CreateIndexesIfNotExists()
+
+ supplier.oldStores.preference.(*SqlPreferenceStore).DeleteUnusedFeatures()
+
+ return supplier
+}
+
+func (s *SqlSupplier) SetChainNext(next store.LayeredStoreSupplier) {
+ s.next = next
+}
+
+func (s *SqlSupplier) Next() store.LayeredStoreSupplier {
+ return s.next
+}
+
+func setupConnection(conType string, driver string, dataSource string, maxIdle int, maxOpen int, trace bool) *gorp.DbMap {
+ db, err := dbsql.Open(driver, dataSource)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.open_conn.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_DB_OPEN)
+ }
+
+ for i := 0; i < DB_PING_ATTEMPTS; i++ {
+ l4g.Info("Pinging SQL %v database", con_type)
+ ctx, cancel := context.WithTimeout(context.Background(), DB_PING_TIMEOUT_SECS*time.Second)
+ defer cancel()
+ err = db.PingContext(ctx)
+ if err == nil {
+ break
+ } else {
+ if i == DB_PING_ATTEMPTS-1 {
+ l4g.Critical("Failed to ping DB, server will exit err=%v", err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_PING)
+ } else {
+ l4g.Error("Failed to ping DB retrying in %v seconds err=%v", DB_PING_TIMEOUT_SECS, err)
+ time.Sleep(DB_PING_TIMEOUT_SECS * time.Second)
+ }
+ }
+ }
+
+ db.SetMaxIdleConns(maxIdle)
+ db.SetMaxOpenConns(maxOpen)
+ db.SetConnMaxLifetime(time.Duration(MAX_DB_CONN_LIFETIME) * time.Minute)
+
+ var dbmap *gorp.DbMap
+
+ connectionTimeout := time.Duration(*utils.Cfg.SqlSettings.QueryTimeout) * time.Second
+
+ if driver == "sqlite3" {
+ dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.SqliteDialect{}, QueryTimeout: connectionTimeout}
+ } else if driver == model.DATABASE_DRIVER_MYSQL {
+ dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.MySQLDialect{Engine: "InnoDB", Encoding: "UTF8MB4"}, QueryTimeout: connectionTimeout}
+ } else if driver == model.DATABASE_DRIVER_POSTGRES {
+ dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.PostgresDialect{}, QueryTimeout: connectionTimeout}
+ } else {
+ l4g.Critical(utils.T("store.sql.dialect_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_NO_DRIVER)
+ }
+
+ if trace {
+ dbmap.TraceOn("", sqltrace.New(os.Stdout, "sql-trace:", sqltrace.Lmicroseconds))
+ }
+
+ return dbmap
+}
+
+func (s *SqlSupplier) initConnection() {
+ s.master = setupConnection("master", *utils.Cfg.SqlSettings.DriverName,
+ *utils.Cfg.SqlSettings.DataSource, *utils.Cfg.SqlSettings.MaxIdleConns,
+ *utils.Cfg.SqlSettings.MaxOpenConns, utils.Cfg.SqlSettings.Trace)
+
+ if len(utils.Cfg.SqlSettings.DataSourceReplicas) == 0 {
+ s.replicas = make([]*gorp.DbMap, 1)
+ s.replicas[0] = s.master
+ } else {
+ s.replicas = make([]*gorp.DbMap, len(utils.Cfg.SqlSettings.DataSourceReplicas))
+ for i, replica := range utils.Cfg.SqlSettings.DataSourceReplicas {
+ s.replicas[i] = setupConnection(fmt.Sprintf("replica-%v", i), *utils.Cfg.SqlSettings.DriverName, replica,
+ *utils.Cfg.SqlSettings.MaxIdleConns, *utils.Cfg.SqlSettings.MaxOpenConns,
+ utils.Cfg.SqlSettings.Trace)
+ }
+ }
+
+ if len(utils.Cfg.SqlSettings.DataSourceSearchReplicas) == 0 {
+ s.searchReplicas = s.replicas
+ } else {
+ s.searchReplicas = make([]*gorp.DbMap, len(utils.Cfg.SqlSettings.DataSourceSearchReplicas))
+ for i, replica := range utils.Cfg.SqlSettings.DataSourceSearchReplicas {
+ s.searchReplicas[i] = setupConnection(fmt.Sprintf("search-replica-%v", i), *utils.Cfg.SqlSettings.DriverName, replica,
+ *utils.Cfg.SqlSettings.MaxIdleConns, *utils.Cfg.SqlSettings.MaxOpenConns,
+ utils.Cfg.SqlSettings.Trace)
+ }
+ }
+}
+
+func (ss *SqlSupplier) GetCurrentSchemaVersion() string {
+ version, _ := ss.GetMaster().SelectStr("SELECT Value FROM Systems WHERE Name='Version'")
+ return version
+}
+
+func (ss *SqlSupplier) GetMaster() *gorp.DbMap {
+ return ss.master
+}
+
+func (ss *SqlSupplier) GetSearchReplica() *gorp.DbMap {
+ rrNum := atomic.AddInt64(&ss.srCounter, 1) % int64(len(ss.searchReplicas))
+ return ss.searchReplicas[rrNum]
+}
+
+func (ss *SqlSupplier) GetReplica() *gorp.DbMap {
+ rrNum := atomic.AddInt64(&ss.rrCounter, 1) % int64(len(ss.replicas))
+ return ss.replicas[rrNum]
+}
+
+func (ss *SqlSupplier) TotalMasterDbConnections() int {
+ return ss.GetMaster().Db.Stats().OpenConnections
+}
+
+func (ss *SqlSupplier) TotalReadDbConnections() int {
+
+ if len(utils.Cfg.SqlSettings.DataSourceReplicas) == 0 {
+ return 0
+ }
+
+ count := 0
+ for _, db := range ss.replicas {
+ count = count + db.Db.Stats().OpenConnections
+ }
+
+ return count
+}
+
+func (ss *SqlSupplier) TotalSearchDbConnections() int {
+ if len(utils.Cfg.SqlSettings.DataSourceSearchReplicas) == 0 {
+ return 0
+ }
+
+ count := 0
+ for _, db := range ss.searchReplicas {
+ count = count + db.Db.Stats().OpenConnections
+ }
+
+ return count
+}
+
+func (ss *SqlSupplier) MarkSystemRanUnitTests() {
+ if result := <-ss.System().Get(); result.Err == nil {
+ props := result.Data.(model.StringMap)
+ unitTests := props[model.SYSTEM_RAN_UNIT_TESTS]
+ if len(unitTests) == 0 {
+ systemTests := &model.System{Name: model.SYSTEM_RAN_UNIT_TESTS, Value: "1"}
+ <-ss.System().Save(systemTests)
+ }
+ }
+}
+
+func (ss *SqlSupplier) DoesTableExist(tableName string) bool {
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ count, err := ss.GetMaster().SelectInt(
+ `SELECT count(relname) FROM pg_class WHERE relname=$1`,
+ strings.ToLower(tableName),
+ )
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.table_exists.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_TABLE_EXISTS)
+ }
+
+ return count > 0
+
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+
+ count, err := ss.GetMaster().SelectInt(
+ `SELECT
+ COUNT(0) AS table_exists
+ FROM
+ information_schema.TABLES
+ WHERE
+ TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = ?
+ `,
+ tableName,
+ )
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.table_exists.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_TABLE_EXISTS_MYSQL)
+ }
+
+ return count > 0
+
+ } else {
+ l4g.Critical(utils.T("store.sql.column_exists_missing_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_COLUMN_EXISTS)
+ return false
+ }
+}
+
+func (ss *SqlSupplier) DoesColumnExist(tableName string, columnName string) bool {
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ count, err := ss.GetMaster().SelectInt(
+ `SELECT COUNT(0)
+ FROM pg_attribute
+ WHERE attrelid = $1::regclass
+ AND attname = $2
+ AND NOT attisdropped`,
+ strings.ToLower(tableName),
+ strings.ToLower(columnName),
+ )
+
+ if err != nil {
+ if err.Error() == "pq: relation \""+strings.ToLower(tableName)+"\" does not exist" {
+ return false
+ }
+
+ l4g.Critical(utils.T("store.sql.column_exists.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_DOES_COLUMN_EXISTS_POSTGRES)
+ }
+
+ return count > 0
+
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+
+ count, err := ss.GetMaster().SelectInt(
+ `SELECT
+ COUNT(0) AS column_exists
+ FROM
+ information_schema.COLUMNS
+ WHERE
+ TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = ?
+ AND COLUMN_NAME = ?`,
+ tableName,
+ columnName,
+ )
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.column_exists.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_DOES_COLUMN_EXISTS_MYSQL)
+ }
+
+ return count > 0
+
+ } else {
+ l4g.Critical(utils.T("store.sql.column_exists_missing_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_DOES_COLUMN_EXISTS_MISSING)
+ return false
+ }
+}
+
+func (ss *SqlSupplier) CreateColumnIfNotExists(tableName string, columnName string, mySqlColType string, postgresColType string, defaultValue string) bool {
+
+ if ss.DoesColumnExist(tableName, columnName) {
+ return false
+ }
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ _, err := ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " ADD " + columnName + " " + postgresColType + " DEFAULT '" + defaultValue + "'")
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.create_column.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_COLUMN_POSTGRES)
+ }
+
+ return true
+
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ _, err := ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " ADD " + columnName + " " + mySqlColType + " DEFAULT '" + defaultValue + "'")
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.create_column.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_COLUMN_MYSQL)
+ }
+
+ return true
+
+ } else {
+ l4g.Critical(utils.T("store.sql.create_column_missing_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_COLUMN_MISSING)
+ return false
+ }
+}
+
+func (ss *SqlSupplier) RemoveColumnIfExists(tableName string, columnName string) bool {
+
+ if !ss.DoesColumnExist(tableName, columnName) {
+ return false
+ }
+
+ _, err := ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " DROP COLUMN " + columnName)
+ if err != nil {
+ l4g.Critical("Failed to drop column %v", err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_COLUMN)
+ }
+
+ return true
+}
+
+func (ss *SqlSupplier) RemoveTableIfExists(tableName string) bool {
+ if !ss.DoesTableExist(tableName) {
+ return false
+ }
+
+ _, err := ss.GetMaster().ExecNoTimeout("DROP TABLE " + tableName)
+ if err != nil {
+ l4g.Critical("Failed to drop table %v", err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_TABLE)
+ }
+
+ return true
+}
+
+func (ss *SqlSupplier) RenameColumnIfExists(tableName string, oldColumnName string, newColumnName string, colType string) bool {
+ if !ss.DoesColumnExist(tableName, oldColumnName) {
+ return false
+ }
+
+ var err error
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ _, err = ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " CHANGE " + oldColumnName + " " + newColumnName + " " + colType)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ _, err = ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " RENAME COLUMN " + oldColumnName + " TO " + newColumnName)
+ }
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.rename_column.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_RENAME_COLUMN)
+ }
+
+ return true
+}
+
+func (ss *SqlSupplier) GetMaxLengthOfColumnIfExists(tableName string, columnName string) string {
+ if !ss.DoesColumnExist(tableName, columnName) {
+ return ""
+ }
+
+ var result string
+ var err error
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ result, err = ss.GetMaster().SelectStr("SELECT CHARACTER_MAXIMUM_LENGTH FROM information_schema.columns WHERE table_name = '" + tableName + "' AND COLUMN_NAME = '" + columnName + "'")
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ result, err = ss.GetMaster().SelectStr("SELECT character_maximum_length FROM information_schema.columns WHERE table_name = '" + strings.ToLower(tableName) + "' AND column_name = '" + strings.ToLower(columnName) + "'")
+ }
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.maxlength_column.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_MAX_COLUMN)
+ }
+
+ return result
+}
+
+func (ss *SqlSupplier) AlterColumnTypeIfExists(tableName string, columnName string, mySqlColType string, postgresColType string) bool {
+ if !ss.DoesColumnExist(tableName, columnName) {
+ return false
+ }
+
+ var err error
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ _, err = ss.GetMaster().ExecNoTimeout("ALTER TABLE " + tableName + " MODIFY " + columnName + " " + mySqlColType)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ _, err = ss.GetMaster().ExecNoTimeout("ALTER TABLE " + strings.ToLower(tableName) + " ALTER COLUMN " + strings.ToLower(columnName) + " TYPE " + postgresColType)
+ }
+
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.alter_column_type.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_ALTER_COLUMN)
+ }
+
+ return true
+}
+
+func (ss *SqlSupplier) CreateUniqueIndexIfNotExists(indexName string, tableName string, columnName string) bool {
+ return ss.createIndexIfNotExists(indexName, tableName, columnName, INDEX_TYPE_DEFAULT, true)
+}
+
+func (ss *SqlSupplier) CreateIndexIfNotExists(indexName string, tableName string, columnName string) bool {
+ return ss.createIndexIfNotExists(indexName, tableName, columnName, INDEX_TYPE_DEFAULT, false)
+}
+
+func (ss *SqlSupplier) CreateFullTextIndexIfNotExists(indexName string, tableName string, columnName string) bool {
+ return ss.createIndexIfNotExists(indexName, tableName, columnName, INDEX_TYPE_FULL_TEXT, false)
+}
+
+func (ss *SqlSupplier) createIndexIfNotExists(indexName string, tableName string, columnName string, indexType string, unique bool) bool {
+
+ uniqueStr := ""
+ if unique {
+ uniqueStr = "UNIQUE "
+ }
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ _, errExists := ss.GetMaster().SelectStr("SELECT $1::regclass", indexName)
+ // It should fail if the index does not exist
+ if errExists == nil {
+ return false
+ }
+
+ query := ""
+ if indexType == INDEX_TYPE_FULL_TEXT {
+ postgresColumnNames := convertMySQLFullTextColumnsToPostgres(columnName)
+ query = "CREATE INDEX " + indexName + " ON " + tableName + " USING gin(to_tsvector('english', " + postgresColumnNames + "))"
+ } else {
+ query = "CREATE " + uniqueStr + "INDEX " + indexName + " ON " + tableName + " (" + columnName + ")"
+ }
+
+ _, err := ss.GetMaster().ExecNoTimeout(query)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.create_index.critical"), errExists)
+ l4g.Critical(utils.T("store.sql.create_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_INDEX_POSTGRES)
+ }
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+
+ count, err := ss.GetMaster().SelectInt("SELECT COUNT(0) AS index_exists FROM information_schema.statistics WHERE TABLE_SCHEMA = DATABASE() and table_name = ? AND index_name = ?", tableName, indexName)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.check_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_INDEX_MYSQL)
+ }
+
+ if count > 0 {
+ return false
+ }
+
+ fullTextIndex := ""
+ if indexType == INDEX_TYPE_FULL_TEXT {
+ fullTextIndex = " FULLTEXT "
+ }
+
+ _, err = ss.GetMaster().ExecNoTimeout("CREATE " + uniqueStr + fullTextIndex + " INDEX " + indexName + " ON " + tableName + " (" + columnName + ")")
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.create_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_INDEX_FULL_MYSQL)
+ }
+ } else {
+ l4g.Critical(utils.T("store.sql.create_index_missing_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_CREATE_INDEX_MISSING)
+ }
+
+ return true
+}
+
+func (ss *SqlSupplier) RemoveIndexIfExists(indexName string, tableName string) bool {
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ _, err := ss.GetMaster().SelectStr("SELECT $1::regclass", indexName)
+ // It should fail if the index does not exist
+ if err != nil {
+ return false
+ }
+
+ _, err = ss.GetMaster().ExecNoTimeout("DROP INDEX " + indexName)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.remove_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_INDEX_POSTGRES)
+ }
+
+ return true
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+
+ count, err := ss.GetMaster().SelectInt("SELECT COUNT(0) AS index_exists FROM information_schema.statistics WHERE TABLE_SCHEMA = DATABASE() and table_name = ? AND index_name = ?", tableName, indexName)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.check_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_INDEX_MYSQL)
+ }
+
+ if count <= 0 {
+ return false
+ }
+
+ _, err = ss.GetMaster().ExecNoTimeout("DROP INDEX " + indexName + " ON " + tableName)
+ if err != nil {
+ l4g.Critical(utils.T("store.sql.remove_index.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_INDEX_MYSQL)
+ }
+ } else {
+ l4g.Critical(utils.T("store.sql.create_index_missing_driver.critical"))
+ time.Sleep(time.Second)
+ os.Exit(EXIT_REMOVE_INDEX_MISSING)
+ }
+
+ return true
+}
+
+func IsUniqueConstraintError(err error, indexName []string) bool {
+ unique := false
+ if pqErr, ok := err.(*pq.Error); ok && pqErr.Code == "23505" {
+ unique = true
+ }
+
+ if mysqlErr, ok := err.(*mysql.MySQLError); ok && mysqlErr.Number == 1062 {
+ unique = true
+ }
+
+ field := false
+ for _, contain := range indexName {
+ if strings.Contains(err.Error(), contain) {
+ field = true
+ break
+ }
+ }
+
+ return unique && field
+}
+
+func (ss *SqlSupplier) GetAllConns() []*gorp.DbMap {
+ all := make([]*gorp.DbMap, len(ss.replicas)+1)
+ copy(all, ss.replicas)
+ all[len(ss.replicas)] = ss.master
+ return all
+}
+
+func (ss *SqlSupplier) Close() {
+ l4g.Info(utils.T("store.sql.closing.info"))
+ ss.master.Db.Close()
+ for _, replica := range ss.replicas {
+ replica.Db.Close()
+ }
+}
+
+func (ss *SqlSupplier) Team() store.TeamStore {
+ return ss.oldStores.team
+}
+
+func (ss *SqlSupplier) Channel() store.ChannelStore {
+ return ss.oldStores.channel
+}
+
+func (ss *SqlSupplier) Post() store.PostStore {
+ return ss.oldStores.post
+}
+
+func (ss *SqlSupplier) User() store.UserStore {
+ return ss.oldStores.user
+}
+
+func (ss *SqlSupplier) Session() store.SessionStore {
+ return ss.oldStores.session
+}
+
+func (ss *SqlSupplier) Audit() store.AuditStore {
+ return ss.oldStores.audit
+}
+
+func (ss *SqlSupplier) ClusterDiscovery() store.ClusterDiscoveryStore {
+ return ss.oldStores.cluster
+}
+
+func (ss *SqlSupplier) Compliance() store.ComplianceStore {
+ return ss.oldStores.compliance
+}
+
+func (ss *SqlSupplier) OAuth() store.OAuthStore {
+ return ss.oldStores.oauth
+}
+
+func (ss *SqlSupplier) System() store.SystemStore {
+ return ss.oldStores.system
+}
+
+func (ss *SqlSupplier) Webhook() store.WebhookStore {
+ return ss.oldStores.webhook
+}
+
+func (ss *SqlSupplier) Command() store.CommandStore {
+ return ss.oldStores.command
+}
+
+func (ss *SqlSupplier) CommandWebhook() store.CommandWebhookStore {
+ return ss.oldStores.commandWebhook
+}
+
+func (ss *SqlSupplier) Preference() store.PreferenceStore {
+ return ss.oldStores.preference
+}
+
+func (ss *SqlSupplier) License() store.LicenseStore {
+ return ss.oldStores.license
+}
+
+func (ss *SqlSupplier) Token() store.TokenStore {
+ return ss.oldStores.token
+}
+
+func (ss *SqlSupplier) Emoji() store.EmojiStore {
+ return ss.oldStores.emoji
+}
+
+func (ss *SqlSupplier) Status() store.StatusStore {
+ return ss.oldStores.status
+}
+
+func (ss *SqlSupplier) FileInfo() store.FileInfoStore {
+ return ss.oldStores.fileInfo
+}
+
+func (ss *SqlSupplier) Reaction() store.ReactionStore {
+ return ss.oldStores.reaction
+}
+
+func (ss *SqlSupplier) Job() store.JobStore {
+ return ss.oldStores.job
+}
+
+func (ss *SqlSupplier) UserAccessToken() store.UserAccessTokenStore {
+ return ss.oldStores.userAccessToken
+}
+
+func (ss *SqlSupplier) DropAllTables() {
+ ss.master.TruncateTables()
+}
+
+type mattermConverter struct{}
+
+func (me mattermConverter) ToDb(val interface{}) (interface{}, error) {
+
+ switch t := val.(type) {
+ case model.StringMap:
+ return model.MapToJson(t), nil
+ case map[string]string:
+ return model.MapToJson(model.StringMap(t)), nil
+ case model.StringArray:
+ return model.ArrayToJson(t), nil
+ case model.StringInterface:
+ return model.StringInterfaceToJson(t), nil
+ case map[string]interface{}:
+ return model.StringInterfaceToJson(model.StringInterface(t)), nil
+ }
+
+ return val, nil
+}
+
+func (me mattermConverter) FromDb(target interface{}) (gorp.CustomScanner, bool) {
+ switch target.(type) {
+ case *model.StringMap:
+ binder := func(holder, target interface{}) error {
+ s, ok := holder.(*string)
+ if !ok {
+ return errors.New(utils.T("store.sql.convert_string_map"))
+ }
+ b := []byte(*s)
+ return json.Unmarshal(b, target)
+ }
+ return gorp.CustomScanner{Holder: new(string), Target: target, Binder: binder}, true
+ case *map[string]string:
+ binder := func(holder, target interface{}) error {
+ s, ok := holder.(*string)
+ if !ok {
+ return errors.New(utils.T("store.sql.convert_string_map"))
+ }
+ b := []byte(*s)
+ return json.Unmarshal(b, target)
+ }
+ return gorp.CustomScanner{Holder: new(string), Target: target, Binder: binder}, true
+ case *model.StringArray:
+ binder := func(holder, target interface{}) error {
+ s, ok := holder.(*string)
+ if !ok {
+ return errors.New(utils.T("store.sql.convert_string_array"))
+ }
+ b := []byte(*s)
+ return json.Unmarshal(b, target)
+ }
+ return gorp.CustomScanner{Holder: new(string), Target: target, Binder: binder}, true
+ case *model.StringInterface:
+ binder := func(holder, target interface{}) error {
+ s, ok := holder.(*string)
+ if !ok {
+ return errors.New(utils.T("store.sql.convert_string_interface"))
+ }
+ b := []byte(*s)
+ return json.Unmarshal(b, target)
+ }
+ return gorp.CustomScanner{Holder: new(string), Target: target, Binder: binder}, true
+ case *map[string]interface{}:
+ binder := func(holder, target interface{}) error {
+ s, ok := holder.(*string)
+ if !ok {
+ return errors.New(utils.T("store.sql.convert_string_interface"))
+ }
+ b := []byte(*s)
+ return json.Unmarshal(b, target)
+ }
+ return gorp.CustomScanner{Holder: new(string), Target: target, Binder: binder}, true
+ }
+
+ return gorp.CustomScanner{}, false
+}
+
+func convertMySQLFullTextColumnsToPostgres(columnNames string) string {
+ columns := strings.Split(columnNames, ", ")
+ concatenatedColumnNames := ""
+ for i, c := range columns {
+ concatenatedColumnNames += c
+ if i < len(columns)-1 {
+ concatenatedColumnNames += " || ' ' || "
+ }
+ }
+
+ return concatenatedColumnNames
+}
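To make the MySQL/Postgres split in the schema helpers above concrete, here is a hedged sketch of how they compose; the table, column, and index names are hypothetical and only for illustration:

	// Hypothetical migration step using the helpers defined above.
	func exampleSchemaStep(ss *SqlSupplier) {
		// Adds the column only when missing; MySQL and Postgres column
		// types are supplied separately, along with a default value.
		ss.CreateColumnIfNotExists("Posts", "ExampleCol", "varchar(64)", "varchar(64)", "")

		// On MySQL this creates a FULLTEXT index; on Postgres the column list
		// "Message, Hashtags" is rewritten by convertMySQLFullTextColumnsToPostgres
		// to "Message || ' ' || Hashtags" inside gin(to_tsvector('english', ...)).
		ss.CreateFullTextIndexIfNotExists("idx_posts_example_txt", "Posts", "Message, Hashtags")

		// The helpers return true only when they actually changed the schema.
		_ = ss.RemoveIndexIfExists("idx_posts_example_txt", "Posts")
	}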
diff --git a/store/sqlstore/supplier_reactions.go b/store/sqlstore/supplier_reactions.go
new file mode 100644
index 000000000..1732b16ee
--- /dev/null
+++ b/store/sqlstore/supplier_reactions.go
@@ -0,0 +1,213 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "context"
+ "net/http"
+
+ l4g "github.com/alecthomas/log4go"
+
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+func initSqlSupplierReactions(sqlStore SqlStore) {
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Reaction{}, "Reactions").SetKeys(false, "UserId", "PostId", "EmojiName")
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("PostId").SetMaxSize(26)
+ table.ColMap("EmojiName").SetMaxSize(64)
+ }
+}
+
+func (s *SqlSupplier) ReactionSave(ctx context.Context, reaction *model.Reaction, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
+ result := store.NewSupplierResult()
+
+ reaction.PreSave()
+ if result.Err = reaction.IsValid(); result.Err != nil {
+ return result
+ }
+
+ if transaction, err := s.GetMaster().Begin(); err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.Save", "store.sql_reaction.save.begin.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ err := saveReactionAndUpdatePost(transaction, reaction)
+
+ if err != nil {
+ transaction.Rollback()
+
+ // We don't consider duplicated save calls as an error
+ if !IsUniqueConstraintError(err, []string{"reactions_pkey", "PRIMARY"}) {
+ result.Err = model.NewAppError("SqlPreferenceStore.Save", "store.sql_reaction.save.save.app_error", nil, err.Error(), http.StatusBadRequest)
+ }
+ } else {
+ if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlPreferenceStore.Save", "store.sql_reaction.save.commit.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ if result.Err == nil {
+ result.Data = reaction
+ }
+ }
+
+ return result
+}
+
+func (s *SqlSupplier) ReactionDelete(ctx context.Context, reaction *model.Reaction, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
+ result := store.NewSupplierResult()
+
+ if transaction, err := s.GetMaster().Begin(); err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.Delete", "store.sql_reaction.delete.begin.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ err := deleteReactionAndUpdatePost(transaction, reaction)
+
+ if err != nil {
+ transaction.Rollback()
+
+ result.Err = model.NewAppError("SqlPreferenceStore.Delete", "store.sql_reaction.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlPreferenceStore.Delete", "store.sql_reaction.delete.commit.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = reaction
+ }
+ }
+
+ return result
+}
+
+func (s *SqlSupplier) ReactionGetForPost(ctx context.Context, postId string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
+ result := store.NewSupplierResult()
+
+ var reactions []*model.Reaction
+
+ if _, err := s.GetReplica().Select(&reactions,
+ `SELECT
+ *
+ FROM
+ Reactions
+ WHERE
+ PostId = :PostId
+ ORDER BY
+ CreateAt`, map[string]interface{}{"PostId": postId}); err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.GetForPost", "store.sql_reaction.get_for_post.app_error", nil, "", http.StatusInternalServerError)
+ } else {
+ result.Data = reactions
+ }
+
+ return result
+}
+
+func (s *SqlSupplier) ReactionDeleteAllWithEmojiName(ctx context.Context, emojiName string, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
+ result := store.NewSupplierResult()
+
+ var reactions []*model.Reaction
+
+ if _, err := s.GetReplica().Select(&reactions,
+ `SELECT
+ *
+ FROM
+ Reactions
+ WHERE
+ EmojiName = :EmojiName`, map[string]interface{}{"EmojiName": emojiName}); err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.DeleteAllWithEmojiName",
+ "store.sql_reaction.delete_all_with_emoji_name.get_reactions.app_error", nil,
+ "emoji_name="+emojiName+", error="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ if _, err := s.GetMaster().Exec(
+ `DELETE FROM
+ Reactions
+ WHERE
+ EmojiName = :EmojiName`, map[string]interface{}{"EmojiName": emojiName}); err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.DeleteAllWithEmojiName",
+ "store.sql_reaction.delete_all_with_emoji_name.delete_reactions.app_error", nil,
+ "emoji_name="+emojiName+", error="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ for _, reaction := range reactions {
+ if _, err := s.GetMaster().Exec(UPDATE_POST_HAS_REACTIONS_QUERY,
+ map[string]interface{}{"PostId": reaction.PostId, "UpdateAt": model.GetMillis()}); err != nil {
+ l4g.Warn(utils.T("store.sql_reaction.delete_all_with_emoji_name.update_post.warn"), reaction.PostId, err.Error())
+ }
+ }
+
+ return result
+}
+
+func (s *SqlSupplier) ReactionPermanentDeleteBatch(ctx context.Context, endTime int64, limit int64, hints ...store.LayeredStoreHint) *store.LayeredStoreSupplierResult {
+ result := store.NewSupplierResult()
+
+ var query string
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "DELETE from Reactions WHERE Id = any (array (SELECT Id FROM Reactions WHERE CreateAt < :EndTime LIMIT :Limit))"
+ } else {
+ query = "DELETE from Reactions WHERE CreateAt < :EndTime LIMIT :Limit"
+ }
+
+ sqlResult, err := s.GetMaster().Exec(query, map[string]interface{}{"EndTime": endTime, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlReactionStore.PermanentDeleteBatch", "store.sql_reaction.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ } else {
+ rowsAffected, err1 := sqlResult.RowsAffected()
+ if err1 != nil {
+ result.Err = model.NewAppError("SqlReactionStore.PermanentDeleteBatch", "store.sql_reaction.permanent_delete_batch.app_error", nil, ""+err.Error(), http.StatusInternalServerError)
+ result.Data = int64(0)
+ } else {
+ result.Data = rowsAffected
+ }
+ }
+
+ return result
+}
+
+func saveReactionAndUpdatePost(transaction *gorp.Transaction, reaction *model.Reaction) error {
+ if err := transaction.Insert(reaction); err != nil {
+ return err
+ }
+
+ return updatePostForReactions(transaction, reaction.PostId)
+}
+
+func deleteReactionAndUpdatePost(transaction *gorp.Transaction, reaction *model.Reaction) error {
+ if _, err := transaction.Exec(
+ `DELETE FROM
+ Reactions
+ WHERE
+ PostId = :PostId AND
+ UserId = :UserId AND
+ EmojiName = :EmojiName`,
+ map[string]interface{}{"PostId": reaction.PostId, "UserId": reaction.UserId, "EmojiName": reaction.EmojiName}); err != nil {
+ return err
+ }
+
+ return updatePostForReactions(transaction, reaction.PostId)
+}
+
+const (
+ // Set HasReactions = true if and only if the post has reactions, update UpdateAt only if HasReactions changes
+ UPDATE_POST_HAS_REACTIONS_QUERY = `UPDATE
+ Posts
+ SET
+ UpdateAt = (CASE
+ WHEN HasReactions != (SELECT count(0) > 0 FROM Reactions WHERE PostId = :PostId) THEN :UpdateAt
+ ELSE UpdateAt
+ END),
+ HasReactions = (SELECT count(0) > 0 FROM Reactions WHERE PostId = :PostId)
+ WHERE
+ Id = :PostId`
+)
+
+func updatePostForReactions(transaction *gorp.Transaction, postId string) error {
+ _, err := transaction.Exec(UPDATE_POST_HAS_REACTIONS_QUERY, map[string]interface{}{"PostId": postId, "UpdateAt": model.GetMillis()})
+
+ return err
+}
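For orientation, the supplier methods above are what the layered store ultimately calls. A rough sketch of a direct round trip, assuming a *SqlSupplier built by NewSqlSupplier and using only the calls shown in this file (the post/user IDs and emoji name are placeholders):

	// Illustrative round trip through the reaction supplier methods.
	func exampleReactionRoundTrip(s *SqlSupplier, postId, userId string) *model.AppError {
		reaction := &model.Reaction{PostId: postId, UserId: userId, EmojiName: "smile"}

		// Save inserts the row and recomputes Posts.HasReactions inside one transaction.
		if result := s.ReactionSave(context.Background(), reaction); result.Err != nil {
			return result.Err
		}

		// GetForPost reads from a replica and returns []*model.Reaction in Data.
		result := s.ReactionGetForPost(context.Background(), postId)
		if result.Err != nil {
			return result.Err
		}
		_ = result.Data.([]*model.Reaction)

		// Delete removes the row and re-evaluates HasReactions the same way.
		return s.ReactionDelete(context.Background(), reaction).Err
	}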
diff --git a/store/sqlstore/system_store.go b/store/sqlstore/system_store.go
new file mode 100644
index 000000000..8d863701a
--- /dev/null
+++ b/store/sqlstore/system_store.go
@@ -0,0 +1,137 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlSystemStore struct {
+ SqlStore
+}
+
+func NewSqlSystemStore(sqlStore SqlStore) store.SystemStore {
+ s := &SqlSystemStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.System{}, "Systems").SetKeys(false, "Name")
+ table.ColMap("Name").SetMaxSize(64)
+ table.ColMap("Value").SetMaxSize(1024)
+ }
+
+ return s
+}
+
+func (s SqlSystemStore) CreateIndexesIfNotExists() {
+}
+
+func (s SqlSystemStore) Save(system *model.System) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if err := s.GetMaster().Insert(system); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.Save", "store.sql_system.save.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlSystemStore) SaveOrUpdate(system *model.System) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if err := s.GetReplica().SelectOne(&model.System{}, "SELECT * FROM Systems WHERE Name = :Name", map[string]interface{}{"Name": system.Name}); err == nil {
+ if _, err := s.GetMaster().Update(system); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.SaveOrUpdate", "store.sql_system.update.app_error", nil, "", http.StatusInternalServerError)
+ }
+ } else {
+ if err := s.GetMaster().Insert(system); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.SaveOrUpdate", "store.sql_system.save.app_error", nil, "", http.StatusInternalServerError)
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlSystemStore) Update(system *model.System) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Update(system); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.Update", "store.sql_system.update.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlSystemStore) Get() store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var systems []model.System
+ props := make(model.StringMap)
+ if _, err := s.GetReplica().Select(&systems, "SELECT * FROM Systems"); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.Get", "store.sql_system.get.app_error", nil, "", http.StatusInternalServerError)
+ } else {
+ for _, prop := range systems {
+ props[prop.Name] = prop.Value
+ }
+
+ result.Data = props
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlSystemStore) GetByName(name string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var system model.System
+ if err := s.GetReplica().SelectOne(&system, "SELECT * FROM Systems WHERE Name = :Name", map[string]interface{}{"Name": name}); err != nil {
+ result.Err = model.NewAppError("SqlSystemStore.GetByName", "store.sql_system.get_by_name.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ result.Data = &system
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/system_store_test.go b/store/sqlstore/system_store_test.go
new file mode 100644
index 000000000..752a4daf4
--- /dev/null
+++ b/store/sqlstore/system_store_test.go
@@ -0,0 +1,57 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestSqlSystemStore(t *testing.T) {
+ ss := Setup()
+
+ system := &model.System{Name: model.NewId(), Value: "value"}
+ store.Must(ss.System().Save(system))
+
+ result := <-ss.System().Get()
+ systems := result.Data.(model.StringMap)
+
+ if systems[system.Name] != system.Value {
+ t.Fatal()
+ }
+
+ system.Value = "value2"
+ store.Must(ss.System().Update(system))
+
+ result2 := <-ss.System().Get()
+ systems2 := result2.Data.(model.StringMap)
+
+ if systems2[system.Name] != system.Value {
+ t.Fatal()
+ }
+
+ result3 := <-ss.System().GetByName(system.Name)
+ rsystem := result3.Data.(*model.System)
+ if rsystem.Value != system.Value {
+ t.Fatal()
+ }
+}
+
+func TestSqlSystemStoreSaveOrUpdate(t *testing.T) {
+ ss := Setup()
+
+ system := &model.System{Name: model.NewId(), Value: "value"}
+
+ if err := (<-ss.System().SaveOrUpdate(system)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ system.Value = "value2"
+
+ if r := <-ss.System().SaveOrUpdate(system); r.Err != nil {
+ t.Fatal(r.Err)
+ }
+}
diff --git a/store/sqlstore/team_store.go b/store/sqlstore/team_store.go
new file mode 100644
index 000000000..1b899da46
--- /dev/null
+++ b/store/sqlstore/team_store.go
@@ -0,0 +1,837 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+ "strconv"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ TEAM_MEMBER_EXISTS_ERROR = "store.sql_team.save_member.exists.app_error"
+)
+
+type SqlTeamStore struct {
+ SqlStore
+}
+
+func NewSqlTeamStore(sqlStore SqlStore) store.TeamStore {
+ s := &SqlTeamStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Team{}, "Teams").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("DisplayName").SetMaxSize(64)
+ table.ColMap("Name").SetMaxSize(64).SetUnique(true)
+ table.ColMap("Description").SetMaxSize(255)
+ table.ColMap("Email").SetMaxSize(128)
+ table.ColMap("CompanyName").SetMaxSize(64)
+ table.ColMap("AllowedDomains").SetMaxSize(500)
+ table.ColMap("InviteId").SetMaxSize(32)
+
+ tablem := db.AddTableWithName(model.TeamMember{}, "TeamMembers").SetKeys(false, "TeamId", "UserId")
+ tablem.ColMap("TeamId").SetMaxSize(26)
+ tablem.ColMap("UserId").SetMaxSize(26)
+ tablem.ColMap("Roles").SetMaxSize(64)
+ }
+
+ return s
+}
+
+func (s SqlTeamStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_teams_name", "Teams", "Name")
+ s.RemoveIndexIfExists("idx_teams_description", "Teams")
+ s.CreateIndexIfNotExists("idx_teams_invite_id", "Teams", "InviteId")
+ s.CreateIndexIfNotExists("idx_teams_update_at", "Teams", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_teams_create_at", "Teams", "CreateAt")
+ s.CreateIndexIfNotExists("idx_teams_delete_at", "Teams", "DeleteAt")
+
+ s.CreateIndexIfNotExists("idx_teammembers_team_id", "TeamMembers", "TeamId")
+ s.CreateIndexIfNotExists("idx_teammembers_user_id", "TeamMembers", "UserId")
+ s.CreateIndexIfNotExists("idx_teammembers_delete_at", "TeamMembers", "DeleteAt")
+}
+
+func (s SqlTeamStore) Save(team *model.Team) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(team.Id) > 0 {
+ result.Err = model.NewAppError("SqlTeamStore.Save",
+ "store.sql_team.save.existing.app_error", nil, "id="+team.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ team.PreSave()
+
+ if result.Err = team.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(team); err != nil {
+ if IsUniqueConstraintError(err, []string{"Name", "teams_name_key"}) {
+ result.Err = model.NewAppError("SqlTeamStore.Save", "store.sql_team.save.domain_exists.app_error", nil, "id="+team.Id+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlTeamStore.Save", "store.sql_team.save.app_error", nil, "id="+team.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = team
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) Update(team *model.Team) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ team.PreUpdate()
+
+ if result.Err = team.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if oldResult, err := s.GetMaster().Get(model.Team{}, team.Id); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.Update", "store.sql_team.update.finding.app_error", nil, "id="+team.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else if oldResult == nil {
+ result.Err = model.NewAppError("SqlTeamStore.Update", "store.sql_team.update.find.app_error", nil, "id="+team.Id, http.StatusBadRequest)
+ } else {
+ oldTeam := oldResult.(*model.Team)
+ team.CreateAt = oldTeam.CreateAt
+ team.UpdateAt = model.GetMillis()
+ team.Name = oldTeam.Name
+
+ if count, err := s.GetMaster().Update(team); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.Update", "store.sql_team.update.updating.app_error", nil, "id="+team.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else if count != 1 {
+ result.Err = model.NewAppError("SqlTeamStore.Update", "store.sql_team.update.app_error", nil, "id="+team.Id, http.StatusInternalServerError)
+ } else {
+ result.Data = team
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) UpdateDisplayName(name string, teamId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("UPDATE Teams SET DisplayName = :Name WHERE Id = :Id", map[string]interface{}{"Name": name, "Id": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.UpdateName", "store.sql_team.update_display_name.app_error", nil, "team_id="+teamId, http.StatusInternalServerError)
+ } else {
+ result.Data = teamId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) Get(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := s.GetReplica().Get(model.Team{}, id); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.Get", "store.sql_team.get.finding.app_error", nil, "id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlTeamStore.Get", "store.sql_team.get.find.app_error", nil, "id="+id, http.StatusNotFound)
+ } else {
+ team := obj.(*model.Team)
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+
+ result.Data = team
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetByInviteId(inviteId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ team := model.Team{}
+
+ if err := s.GetReplica().SelectOne(&team, "SELECT * FROM Teams WHERE Id = :InviteId OR InviteId = :InviteId", map[string]interface{}{"InviteId": inviteId}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetByInviteId", "store.sql_team.get_by_invite_id.finding.app_error", nil, "inviteId="+inviteId+", "+err.Error(), http.StatusNotFound)
+ }
+
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+
+ if len(inviteId) == 0 || team.InviteId != inviteId {
+ result.Err = model.NewAppError("SqlTeamStore.GetByInviteId", "store.sql_team.get_by_invite_id.find.app_error", nil, "inviteId="+inviteId, http.StatusNotFound)
+ }
+
+ result.Data = &team
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetByName(name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ team := model.Team{}
+
+ if err := s.GetReplica().SelectOne(&team, "SELECT * FROM Teams WHERE Name = :Name", map[string]interface{}{"Name": name}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetByName", "store.sql_team.get_by_name.app_error", nil, "name="+name+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+
+ result.Data = &team
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) SearchByName(name string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var teams []*model.Team
+
+ if _, err := s.GetReplica().Select(&teams, "SELECT * FROM Teams WHERE Name LIKE :Name", map[string]interface{}{"Name": name + "%"}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.SearchByName", "store.sql_team.get_by_name.app_error", nil, "name="+name+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = teams
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) SearchAll(term string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var teams []*model.Team
+
+ if _, err := s.GetReplica().Select(&teams, "SELECT * FROM Teams WHERE Name LIKE :Term OR DisplayName LIKE :Term", map[string]interface{}{"Term": term + "%"}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.SearchAll", "store.sql_team.search_all_team.app_error", nil, "term="+term+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = teams
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) SearchOpen(term string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var teams []*model.Team
+
+ if _, err := s.GetReplica().Select(&teams, "SELECT * FROM Teams WHERE Type = 'O' AND AllowOpenInvite = true AND (Name LIKE :Term OR DisplayName LIKE :Term)", map[string]interface{}{"Term": term + "%"}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.SearchOpen", "store.sql_team.search_open_team.app_error", nil, "term="+term+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = teams
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetAll() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.Team
+ if _, err := s.GetReplica().Select(&data, "SELECT * FROM Teams"); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetAllTeams", "store.sql_team.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ for _, team := range data {
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetAllPage(offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.Team
+ if _, err := s.GetReplica().Select(&data, "SELECT * FROM Teams LIMIT :Limit OFFSET :Offset", map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetAllTeams", "store.sql_team.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ for _, team := range data {
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetTeamsByUserId(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.Team
+ if _, err := s.GetReplica().Select(&data, "SELECT Teams.* FROM Teams, TeamMembers WHERE TeamMembers.TeamId = Teams.Id AND TeamMembers.UserId = :UserId AND TeamMembers.DeleteAt = 0 AND Teams.DeleteAt = 0", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetTeamsByUserId", "store.sql_team.get_all.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ for _, team := range data {
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetAllTeamListing() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := "SELECT * FROM Teams WHERE AllowOpenInvite = 1"
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "SELECT * FROM Teams WHERE AllowOpenInvite = true"
+ }
+
+ var data []*model.Team
+ if _, err := s.GetReplica().Select(&data, query); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetAllTeamListing", "store.sql_team.get_all_team_listing.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ for _, team := range data {
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetAllTeamPageListing(offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := "SELECT * FROM Teams WHERE AllowOpenInvite = 1 LIMIT :Limit OFFSET :Offset"
+
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "SELECT * FROM Teams WHERE AllowOpenInvite = true LIMIT :Limit OFFSET :Offset"
+ }
+
+ var data []*model.Team
+ if _, err := s.GetReplica().Select(&data, query, map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetAllTeamListing", "store.sql_team.get_all_team_listing.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ for _, team := range data {
+ if len(team.InviteId) == 0 {
+ team.InviteId = team.Id
+ }
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) PermanentDelete(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Teams WHERE Id = :TeamId", map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.Delete", "store.sql_team.permanent_delete.app_error", nil, "teamId="+teamId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
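+// AnalyticsTeamCount returns the number of teams that have not been
+// soft-deleted (DeleteAt = 0).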
+func (s SqlTeamStore) AnalyticsTeamCount() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if c, err := s.GetReplica().SelectInt("SELECT COUNT(*) FROM Teams WHERE DeleteAt = 0", map[string]interface{}{}); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.AnalyticsTeamCount", "store.sql_team.analytics_team_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = c
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
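+// SaveMember validates the member and enforces TeamSettings.MaxUsersPerTeam:
+// only memberships with TeamMembers.DeleteAt = 0 that belong to non-deleted
+// users count toward the limit, and a duplicate membership surfaces
+// TEAM_MEMBER_EXISTS_ERROR. Like the other store methods, the result is
+// delivered asynchronously, e.g. `if result := <-store.Team().SaveMember(member); result.Err != nil { ... }`.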
+func (s SqlTeamStore) SaveMember(member *model.TeamMember) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if result.Err = member.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if count, err := s.GetMaster().SelectInt(
+ `SELECT
+ COUNT(0)
+ FROM
+ TeamMembers
+ INNER JOIN
+ Users
+ ON
+ TeamMembers.UserId = Users.Id
+ WHERE
+ TeamId = :TeamId
+ AND TeamMembers.DeleteAt = 0
+ AND Users.DeleteAt = 0`, map[string]interface{}{"TeamId": member.TeamId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.member_count.app_error", nil, "teamId="+member.TeamId+", "+err.Error(), http.StatusInternalServerError)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else if int(count) >= *utils.Cfg.TeamSettings.MaxUsersPerTeam {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.max_accounts.app_error", nil, "teamId="+member.TeamId, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(member); err != nil {
+ if IsUniqueConstraintError(err, []string{"TeamId", "teammembers_pkey", "PRIMARY"}) {
+ result.Err = model.NewAppError("SqlTeamStore.SaveMember", TEAM_MEMBER_EXISTS_ERROR, nil, "team_id="+member.TeamId+", user_id="+member.UserId+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlTeamStore.SaveMember", "store.sql_team.save_member.save.app_error", nil, "team_id="+member.TeamId+", user_id="+member.UserId+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) UpdateMember(member *model.TeamMember) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ member.PreUpdate()
+
+ if result.Err = member.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if _, err := s.GetMaster().Update(member); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.UpdateMember", "store.sql_team.save_member.save.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
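+// GetMember fetches a single TeamMembers row. A missing row is reported with
+// http.StatusNotFound so callers can distinguish "not a member" from a
+// database error.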
+func (s SqlTeamStore) GetMember(teamId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var member model.TeamMember
+ err := s.GetReplica().SelectOne(&member, "SELECT * FROM TeamMembers WHERE TeamId = :TeamId AND UserId = :UserId", map[string]interface{}{"TeamId": teamId, "UserId": userId})
+ if err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlTeamStore.GetMember", "store.sql_team.get_member.missing.app_error", nil, "teamId="+teamId+" userId="+userId+" "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlTeamStore.GetMember", "store.sql_team.get_member.app_error", nil, "teamId="+teamId+" userId="+userId+" "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = &member
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetMembers(teamId string, offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var members []*model.TeamMember
+ _, err := s.GetReplica().Select(&members, "SELECT * FROM TeamMembers WHERE TeamId = :TeamId AND DeleteAt = 0 LIMIT :Limit OFFSET :Offset", map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit})
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetMembers", "store.sql_team.get_members.app_error", nil, "teamId="+teamId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = members
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetTotalMemberCount(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ count, err := s.GetReplica().SelectInt(`
+ SELECT
+ count(*)
+ FROM
+ TeamMembers,
+ Users
+ WHERE
+ TeamMembers.UserId = Users.Id
+ AND TeamMembers.TeamId = :TeamId
+ AND TeamMembers.DeleteAt = 0`, map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetTotalMemberCount", "store.sql_team.get_member_count.app_error", nil, "teamId="+teamId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
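+// GetActiveMemberCount is like GetTotalMemberCount but additionally requires
+// Users.DeleteAt = 0, so deactivated users are excluded from the count.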
+func (s SqlTeamStore) GetActiveMemberCount(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ count, err := s.GetReplica().SelectInt(`
+ SELECT
+ count(*)
+ FROM
+ TeamMembers,
+ Users
+ WHERE
+ TeamMembers.UserId = Users.Id
+ AND TeamMembers.TeamId = :TeamId
+ AND TeamMembers.DeleteAt = 0
+ AND Users.DeleteAt = 0`, map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetActiveMemberCount", "store.sql_team.get_member_count.app_error", nil, "teamId="+teamId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
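+// GetMembersByIds builds a named-parameter IN clause (:userId0, :userId1, ...)
+// from the given slice. Note that an empty slice produces "IN ()", which the
+// database rejects, so callers get an error rather than an empty result set.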
+func (s SqlTeamStore) GetMembersByIds(teamId string, userIds []string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var members []*model.TeamMember
+ props := make(map[string]interface{})
+ idQuery := ""
+
+ for index, userId := range userIds {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["userId"+strconv.Itoa(index)] = userId
+ idQuery += ":userId" + strconv.Itoa(index)
+ }
+
+ props["TeamId"] = teamId
+
+ if _, err := s.GetReplica().Select(&members, "SELECT * FROM TeamMembers WHERE TeamId = :TeamId AND UserId IN ("+idQuery+") AND DeleteAt = 0", props); err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetMembersByIds", "store.sql_team.get_members_by_ids.app_error", nil, "teamId="+teamId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = members
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetTeamsForUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var members []*model.TeamMember
+ _, err := s.GetReplica().Select(&members, "SELECT * FROM TeamMembers WHERE UserId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetMembers", "store.sql_team.get_members.app_error", nil, "userId="+userId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = members
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
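+// GetChannelUnreadsForAllTeams returns, for every non-deleted channel the user
+// belongs to outside excludeTeamId, the unread count computed as
+// Channels.TotalMsgCount - ChannelMembers.MsgCount, along with the mention
+// count and notify props.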
+func (s SqlTeamStore) GetChannelUnreadsForAllTeams(excludeTeamId, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.ChannelUnread
+ _, err := s.GetReplica().Select(&data,
+ `SELECT
+ Channels.TeamId TeamId, Channels.Id ChannelId, (Channels.TotalMsgCount - ChannelMembers.MsgCount) MsgCount, ChannelMembers.MentionCount MentionCount, ChannelMembers.NotifyProps NotifyProps
+ FROM
+ Channels, ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND UserId = :UserId
+ AND DeleteAt = 0
+ AND TeamId != :TeamId`,
+ map[string]interface{}{"UserId": userId, "TeamId": excludeTeamId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetChannelUnreadsForAllTeams", "store.sql_team.get_unread.app_error", nil, "userId="+userId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = data
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) GetChannelUnreadsForTeam(teamId, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.ChannelUnread
+ _, err := s.GetReplica().Select(&data,
+ `SELECT
+ Channels.TeamId TeamId, Channels.Id ChannelId, (Channels.TotalMsgCount - ChannelMembers.MsgCount) MsgCount, ChannelMembers.MentionCount MentionCount, ChannelMembers.NotifyProps NotifyProps
+ FROM
+ Channels, ChannelMembers
+ WHERE
+ Id = ChannelId
+ AND UserId = :UserId
+ AND TeamId = :TeamId
+ AND DeleteAt = 0`,
+ map[string]interface{}{"TeamId": teamId, "UserId": userId})
+
+ if err != nil {
+ result.Err = model.NewAppError("SqlTeamStore.GetChannelUnreadsForTeam", "store.sql_team.get_unread.app_error", nil, "teamId="+teamId+" "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = data
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
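+// RemoveMember hard-deletes the TeamMembers row. Leaving a team from the UI
+// instead soft-deletes the membership by setting DeleteAt via UpdateMember.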
+func (s SqlTeamStore) RemoveMember(teamId string, userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM TeamMembers WHERE TeamId = :TeamId AND UserId = :UserId", map[string]interface{}{"TeamId": teamId, "UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveMember", "store.sql_team.remove_member.app_error", nil, "team_id="+teamId+", user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) RemoveAllMembersByTeam(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM TeamMembers WHERE TeamId = :TeamId", map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveMember", "store.sql_team.remove_member.app_error", nil, "team_id="+teamId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTeamStore) RemoveAllMembersByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM TeamMembers WHERE UserId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlChannelStore.RemoveMember", "store.sql_team.remove_member.app_error", nil, "user_id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/team_store_test.go b/store/sqlstore/team_store_test.go
new file mode 100644
index 000000000..0cfe483f7
--- /dev/null
+++ b/store/sqlstore/team_store_test.go
@@ -0,0 +1,1028 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+func TestTeamStoreSave(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Team().Save(&o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+
+ o1.Id = ""
+ if err := (<-ss.Team().Save(&o1)).Err; err == nil {
+ t.Fatal("should be unique domain")
+ }
+}
+
+func TestTeamStoreUpdate(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ time.Sleep(100 * time.Millisecond)
+
+ if err := (<-ss.Team().Update(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ o1.Id = "missing"
+ if err := (<-ss.Team().Update(&o1)).Err; err == nil {
+ t.Fatal("Update should have failed because of missing key")
+ }
+
+ o1.Id = model.NewId()
+ if err := (<-ss.Team().Update(&o1)).Err; err == nil {
+ t.Fatal("Update should have faile because id change")
+ }
+}
+
+func TestTeamStoreUpdateDisplayName(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Team{}
+ o1.DisplayName = "Display Name"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1 = (<-ss.Team().Save(o1)).Data.(*model.Team)
+
+ newDisplayName := "NewDisplayName"
+
+ if err := (<-ss.Team().UpdateDisplayName(newDisplayName, o1.Id)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ ro1 := (<-ss.Team().Get(o1.Id)).Data.(*model.Team)
+ if ro1.DisplayName != newDisplayName {
+ t.Fatal("DisplayName not updated")
+ }
+}
+
+func TestTeamStoreGet(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ store.Must(ss.Team().Save(&o1))
+
+ if r1 := <-ss.Team().Get(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Team).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+ }
+
+ if err := (<-ss.Team().Get("")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestTeamStoreGetByName(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.Team().GetByName(o1.Name); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Team).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+ }
+
+ if err := (<-ss.Team().GetByName("")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestTeamStoreSearchByName(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ var name = "zzz" + model.NewId()
+ o1.Name = name + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.Team().SearchByName(name); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.([]*model.Team)[0].ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+ }
+}
+
+func TestTeamStoreSearchAll(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "ADisplayName" + model.NewId()
+ o1.Name = "zz" + model.NewId() + "a"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ p2 := model.Team{}
+ p2.DisplayName = "BDisplayName" + model.NewId()
+ p2.Name = "b" + model.NewId() + "b"
+ p2.Email = model.NewId() + "@nowhere.com"
+ p2.Type = model.TEAM_INVITE
+
+ if err := (<-ss.Team().Save(&p2)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ r1 := <-ss.Team().SearchAll(o1.Name)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 1 {
+ t.Fatal("should have returned 1 team")
+ }
+ if r1.Data.([]*model.Team)[0].ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+
+ r1 = <-ss.Team().SearchAll(p2.DisplayName)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 1 {
+ t.Fatal("should have returned 1 team")
+ }
+ if r1.Data.([]*model.Team)[0].ToJson() != p2.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+
+ r1 = <-ss.Team().SearchAll("junk")
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 0 {
+ t.Fatal("should have not returned a team")
+ }
+}
+
+func TestTeamStoreSearchOpen(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "ADisplayName" + model.NewId()
+ o1.Name = "zz" + model.NewId() + "a"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.AllowOpenInvite = true
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ o2 := model.Team{}
+ o2.DisplayName = "ADisplayName" + model.NewId()
+ o2.Name = "zz" + model.NewId() + "a"
+ o2.Email = model.NewId() + "@nowhere.com"
+ o2.Type = model.TEAM_OPEN
+ o2.AllowOpenInvite = false
+
+ if err := (<-ss.Team().Save(&o2)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ p2 := model.Team{}
+ p2.DisplayName = "BDisplayName" + model.NewId()
+ p2.Name = "b" + model.NewId() + "b"
+ p2.Email = model.NewId() + "@nowhere.com"
+ p2.Type = model.TEAM_INVITE
+ p2.AllowOpenInvite = true
+
+ if err := (<-ss.Team().Save(&p2)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ r1 := <-ss.Team().SearchOpen(o1.Name)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 1 {
+ t.Fatal("should have returned 1 team")
+ }
+ if r1.Data.([]*model.Team)[0].ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+
+ r1 = <-ss.Team().SearchOpen(o1.DisplayName)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 1 {
+ t.Fatal("should have returned 1 team")
+ }
+ if r1.Data.([]*model.Team)[0].ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+
+ r1 = <-ss.Team().SearchOpen(p2.Name)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 0 {
+ t.Fatal("should have not returned a team")
+ }
+
+ r1 = <-ss.Team().SearchOpen(p2.DisplayName)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 0 {
+ t.Fatal("should have not returned a team")
+ }
+
+ r1 = <-ss.Team().SearchOpen("junk")
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 0 {
+ t.Fatal("should have not returned a team")
+ }
+
+ r1 = <-ss.Team().SearchOpen(o2.DisplayName)
+ if r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+ if len(r1.Data.([]*model.Team)) != 0 {
+ t.Fatal("should have not returned a team")
+ }
+}
+
+func TestTeamStoreGetByInviteId(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.InviteId = model.NewId()
+
+ if err := (<-ss.Team().Save(&o1)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ o2 := model.Team{}
+ o2.DisplayName = "DisplayName"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Email = model.NewId() + "@nowhere.com"
+ o2.Type = model.TEAM_OPEN
+
+ if err := (<-ss.Team().Save(&o2)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.Team().GetByInviteId(o1.InviteId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Team).ToJson() != o1.ToJson() {
+ t.Fatal("invalid returned team")
+ }
+ }
+
+ o2.InviteId = ""
+ <-ss.Team().Update(&o2)
+
+ if r1 := <-ss.Team().GetByInviteId(o2.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.Team).Id != o2.Id {
+ t.Fatal("invalid returned team")
+ }
+ }
+
+ if err := (<-ss.Team().GetByInviteId("")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestTeamStoreByUserId(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.InviteId = model.NewId()
+ o1 = store.Must(ss.Team().Save(o1)).(*model.Team)
+
+ m1 := &model.TeamMember{TeamId: o1.Id, UserId: model.NewId()}
+ store.Must(ss.Team().SaveMember(m1))
+
+ if r1 := <-ss.Team().GetTeamsByUserId(m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ teams := r1.Data.([]*model.Team)
+ if len(teams) == 0 {
+ t.Fatal("Should return a team")
+ }
+
+ if teams[0].Id != o1.Id {
+ t.Fatal("should be a member")
+ }
+
+ }
+}
+
+func TestGetAllTeamListing(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o1))
+
+ o2 := model.Team{}
+ o2.DisplayName = "DisplayName"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Email = model.NewId() + "@nowhere.com"
+ o2.Type = model.TEAM_OPEN
+ store.Must(ss.Team().Save(&o2))
+
+ o3 := model.Team{}
+ o3.DisplayName = "DisplayName"
+ o3.Name = "z-z-z" + model.NewId() + "b"
+ o3.Email = model.NewId() + "@nowhere.com"
+ o3.Type = model.TEAM_INVITE
+ o3.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o3))
+
+ o4 := model.Team{}
+ o4.DisplayName = "DisplayName"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Email = model.NewId() + "@nowhere.com"
+ o4.Type = model.TEAM_INVITE
+ store.Must(ss.Team().Save(&o4))
+
+ if r1 := <-ss.Team().GetAllTeamListing(); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ teams := r1.Data.([]*model.Team)
+
+ for _, team := range teams {
+ if !team.AllowOpenInvite {
+ t.Fatal("should have returned team with AllowOpenInvite as true")
+ }
+ }
+
+ if len(teams) == 0 {
+ t.Fatal("failed team listing")
+ }
+ }
+}
+
+func TestGetAllTeamPageListing(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o1))
+
+ o2 := model.Team{}
+ o2.DisplayName = "DisplayName"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Email = model.NewId() + "@nowhere.com"
+ o2.Type = model.TEAM_OPEN
+ o2.AllowOpenInvite = false
+ store.Must(ss.Team().Save(&o2))
+
+ o3 := model.Team{}
+ o3.DisplayName = "DisplayName"
+ o3.Name = "z-z-z" + model.NewId() + "b"
+ o3.Email = model.NewId() + "@nowhere.com"
+ o3.Type = model.TEAM_INVITE
+ o3.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o3))
+
+ o4 := model.Team{}
+ o4.DisplayName = "DisplayName"
+ o4.Name = "zz" + model.NewId() + "b"
+ o4.Email = model.NewId() + "@nowhere.com"
+ o4.Type = model.TEAM_INVITE
+ o4.AllowOpenInvite = false
+ store.Must(ss.Team().Save(&o4))
+
+ if r1 := <-ss.Team().GetAllTeamPageListing(0, 10); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ teams := r1.Data.([]*model.Team)
+
+ for _, team := range teams {
+ if !team.AllowOpenInvite {
+ t.Fatal("should have returned team with AllowOpenInvite as true")
+ }
+ }
+
+ if len(teams) > 10 {
+ t.Fatal("should have returned max of 10 teams")
+ }
+ }
+
+ o5 := model.Team{}
+ o5.DisplayName = "DisplayName"
+ o5.Name = "z-z-z" + model.NewId() + "b"
+ o5.Email = model.NewId() + "@nowhere.com"
+ o5.Type = model.TEAM_OPEN
+ o5.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o5))
+
+ if r1 := <-ss.Team().GetAllTeamPageListing(0, 4); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ teams := r1.Data.([]*model.Team)
+
+ for _, team := range teams {
+ if !team.AllowOpenInvite {
+ t.Fatal("should have returned team with AllowOpenInvite as true")
+ }
+ }
+
+ if len(teams) > 4 {
+ t.Fatal("should have returned max of 4 teams")
+ }
+ }
+
+ if r1 := <-ss.Team().GetAllTeamPageListing(1, 1); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ teams := r1.Data.([]*model.Team)
+
+ for _, team := range teams {
+ if !team.AllowOpenInvite {
+ t.Fatal("should have returned team with AllowOpenInvite as true")
+ }
+ }
+
+ if len(teams) > 1 {
+ t.Fatal("should have returned max of 1 team")
+ }
+ }
+}
+
+func TestDelete(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o1))
+
+ o2 := model.Team{}
+ o2.DisplayName = "DisplayName"
+ o2.Name = "zz" + model.NewId() + "b"
+ o2.Email = model.NewId() + "@nowhere.com"
+ o2.Type = model.TEAM_OPEN
+ store.Must(ss.Team().Save(&o2))
+
+ if r1 := <-ss.Team().PermanentDelete(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+}
+
+func TestTeamCount(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.Team{}
+ o1.DisplayName = "DisplayName"
+ o1.Name = "z-z-z" + model.NewId() + "b"
+ o1.Email = model.NewId() + "@nowhere.com"
+ o1.Type = model.TEAM_OPEN
+ o1.AllowOpenInvite = true
+ store.Must(ss.Team().Save(&o1))
+
+ if r1 := <-ss.Team().AnalyticsTeamCount(); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(int64) == 0 {
+ t.Fatal("should be at least 1 team")
+ }
+ }
+}
+
+func TestTeamMembers(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+ teamId2 := model.NewId()
+
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ m2 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ m3 := &model.TeamMember{TeamId: teamId2, UserId: model.NewId()}
+
+ if r1 := <-ss.Team().SaveMember(m1); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ store.Must(ss.Team().SaveMember(m2))
+ store.Must(ss.Team().SaveMember(m3))
+
+ if r1 := <-ss.Team().GetMembers(teamId1, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 2 {
+ t.Fatal()
+ }
+ }
+
+ if r1 := <-ss.Team().GetMembers(teamId2, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 1 {
+ t.Fatal()
+ }
+
+ if ms[0].UserId != m3.UserId {
+ t.Fatal()
+
+ }
+ }
+
+ if r1 := <-ss.Team().GetTeamsForUser(m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 1 {
+ t.Fatal()
+ }
+
+ if ms[0].TeamId != m1.TeamId {
+ t.Fatal()
+
+ }
+ }
+
+ if r1 := <-ss.Team().RemoveMember(teamId1, m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ if r1 := <-ss.Team().GetMembers(teamId1, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 1 {
+ t.Fatal()
+ }
+
+ if ms[0].UserId != m2.UserId {
+ t.Fatal()
+
+ }
+ }
+
+ store.Must(ss.Team().SaveMember(m1))
+
+ if r1 := <-ss.Team().RemoveAllMembersByTeam(teamId1); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ if r1 := <-ss.Team().GetMembers(teamId1, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 0 {
+ t.Fatal()
+ }
+ }
+
+ uid := model.NewId()
+ m4 := &model.TeamMember{TeamId: teamId1, UserId: uid}
+ m5 := &model.TeamMember{TeamId: teamId2, UserId: uid}
+ store.Must(ss.Team().SaveMember(m4))
+ store.Must(ss.Team().SaveMember(m5))
+
+ if r1 := <-ss.Team().GetTeamsForUser(uid); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 2 {
+ t.Fatal()
+ }
+ }
+
+ if r1 := <-ss.Team().RemoveAllMembersByUser(uid); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ if r1 := <-ss.Team().GetTeamsForUser(m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.TeamMember)
+
+ if len(ms) != 0 {
+ t.Fatal()
+ }
+ }
+}
+
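+// TestSaveTeamMemberMaxMembers lowers MaxUsersPerTeam to 5 and verifies that
+// the cap counts only active memberships of active users: soft-deleting a
+// membership or deactivating a user frees up a slot.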
+func TestSaveTeamMemberMaxMembers(t *testing.T) {
+ ss := Setup()
+
+ MaxUsersPerTeam := *utils.Cfg.TeamSettings.MaxUsersPerTeam
+ defer func() {
+ *utils.Cfg.TeamSettings.MaxUsersPerTeam = MaxUsersPerTeam
+ }()
+ *utils.Cfg.TeamSettings.MaxUsersPerTeam = 5
+
+ team := store.Must(ss.Team().Save(&model.Team{
+ DisplayName: "DisplayName",
+ Name: "z-z-z" + model.NewId() + "b",
+ Type: model.TEAM_OPEN,
+ })).(*model.Team)
+ defer func() {
+ <-ss.Team().PermanentDelete(team.Id)
+ }()
+
+ userIds := make([]string, *utils.Cfg.TeamSettings.MaxUsersPerTeam)
+
+ for i := 0; i < *utils.Cfg.TeamSettings.MaxUsersPerTeam; i++ {
+ userIds[i] = store.Must(ss.User().Save(&model.User{
+ Username: model.NewId(),
+ Email: model.NewId(),
+ })).(*model.User).Id
+
+ defer func(userId string) {
+ <-ss.User().PermanentDelete(userId)
+ }(userIds[i])
+
+ store.Must(ss.Team().SaveMember(&model.TeamMember{
+ TeamId: team.Id,
+ UserId: userIds[i],
+ }))
+
+ defer func(userId string) {
+ <-ss.Team().RemoveMember(team.Id, userId)
+ }(userIds[i])
+ }
+
+ if result := <-ss.Team().GetTotalMemberCount(team.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if count := result.Data.(int64); int(count) != *utils.Cfg.TeamSettings.MaxUsersPerTeam {
+ t.Fatalf("should start with 5 team members, had %v instead", count)
+ }
+
+ newUserId := store.Must(ss.User().Save(&model.User{
+ Username: model.NewId(),
+ Email: model.NewId(),
+ })).(*model.User).Id
+ defer func() {
+ <-ss.User().PermanentDelete(newUserId)
+ }()
+
+ if result := <-ss.Team().SaveMember(&model.TeamMember{
+ TeamId: team.Id,
+ UserId: newUserId,
+ }); result.Err == nil {
+ t.Fatal("shouldn't be able to save member when at maximum members per team")
+ }
+
+ if result := <-ss.Team().GetTotalMemberCount(team.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if count := result.Data.(int64); int(count) != *utils.Cfg.TeamSettings.MaxUsersPerTeam {
+ t.Fatalf("should still have 5 team members, had %v instead", count)
+ }
+
+ // Leaving the team from the UI sets DeleteAt instead of using TeamStore.RemoveMember
+ store.Must(ss.Team().UpdateMember(&model.TeamMember{
+ TeamId: team.Id,
+ UserId: userIds[0],
+ DeleteAt: 1234,
+ }))
+
+ if result := <-ss.Team().GetTotalMemberCount(team.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if count := result.Data.(int64); int(count) != *utils.Cfg.TeamSettings.MaxUsersPerTeam-1 {
+ t.Fatalf("should now only have 4 team members, had %v instead", count)
+ }
+
+ if result := <-ss.Team().SaveMember(&model.TeamMember{TeamId: team.Id, UserId: newUserId}); result.Err != nil {
+ t.Fatal("should've been able to save new member after deleting one", result.Err)
+ } else {
+ defer func(userId string) {
+ <-ss.Team().RemoveMember(team.Id, userId)
+ }(newUserId)
+ }
+
+ if result := <-ss.Team().GetTotalMemberCount(team.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if count := result.Data.(int64); int(count) != *utils.Cfg.TeamSettings.MaxUsersPerTeam {
+ t.Fatalf("should have 5 team members again, had %v instead", count)
+ }
+
+ // Deactivating a user should make them stop counting against max members
+ user2 := store.Must(ss.User().Get(userIds[1])).(*model.User)
+ user2.DeleteAt = 1234
+ store.Must(ss.User().Update(user2, true))
+
+ newUserId2 := store.Must(ss.User().Save(&model.User{
+ Username: model.NewId(),
+ Email: model.NewId(),
+ })).(*model.User).Id
+ if result := <-ss.Team().SaveMember(&model.TeamMember{TeamId: team.Id, UserId: newUserId2}); result.Err != nil {
+ t.Fatal("should've been able to save new member after deleting one", result.Err)
+ } else {
+ defer func(userId string) {
+ <-ss.Team().RemoveMember(team.Id, userId)
+ }(newUserId2)
+ }
+}
+
+func TestGetTeamMember(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ store.Must(ss.Team().SaveMember(m1))
+
+ if r := <-ss.Team().GetMember(m1.TeamId, m1.UserId); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ rm1 := r.Data.(*model.TeamMember)
+
+ if rm1.TeamId != m1.TeamId {
+ t.Fatal("bad team id")
+ }
+
+ if rm1.UserId != m1.UserId {
+ t.Fatal("bad user id")
+ }
+ }
+
+ if r := <-ss.Team().GetMember(m1.TeamId, ""); r.Err == nil {
+ t.Fatal("empty user id - should have failed")
+ }
+
+ if r := <-ss.Team().GetMember("", m1.UserId); r.Err == nil {
+ t.Fatal("empty team id - should have failed")
+ }
+}
+
+func TestGetTeamMembersByIds(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ store.Must(ss.Team().SaveMember(m1))
+
+ if r := <-ss.Team().GetMembersByIds(m1.TeamId, []string{m1.UserId}); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ rm1 := r.Data.([]*model.TeamMember)[0]
+
+ if rm1.TeamId != m1.TeamId {
+ t.Fatal("bad team id")
+ }
+
+ if rm1.UserId != m1.UserId {
+ t.Fatal("bad user id")
+ }
+ }
+
+ m2 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ store.Must(ss.Team().SaveMember(m2))
+
+ if r := <-ss.Team().GetMembersByIds(m1.TeamId, []string{m1.UserId, m2.UserId, model.NewId()}); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ rm := r.Data.([]*model.TeamMember)
+
+ if len(rm) != 2 {
+ t.Fatal("return wrong number of results")
+ }
+ }
+
+ if r := <-ss.Team().GetMembersByIds(m1.TeamId, []string{}); r.Err == nil {
+ t.Fatal("empty user ids - should have failed")
+ }
+}
+
+func TestTeamStoreMemberCount(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.DeleteAt = 1
+ store.Must(ss.User().Save(u2))
+
+ teamId1 := model.NewId()
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: u1.Id}
+ store.Must(ss.Team().SaveMember(m1))
+
+ m2 := &model.TeamMember{TeamId: teamId1, UserId: u2.Id}
+ store.Must(ss.Team().SaveMember(m2))
+
+ if result := <-ss.Team().GetTotalMemberCount(teamId1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(int64) != 2 {
+ t.Fatal("wrong count")
+ }
+ }
+
+ if result := <-ss.Team().GetActiveMemberCount(teamId1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(int64) != 1 {
+ t.Fatal("wrong count")
+ }
+ }
+
+ m3 := &model.TeamMember{TeamId: teamId1, UserId: model.NewId()}
+ store.Must(ss.Team().SaveMember(m3))
+
+ if result := <-ss.Team().GetTotalMemberCount(teamId1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(int64) != 2 {
+ t.Fatal("wrong count")
+ }
+ }
+
+ if result := <-ss.Team().GetActiveMemberCount(teamId1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(int64) != 1 {
+ t.Fatal("wrong count")
+ }
+ }
+}
+
+func TestGetChannelUnreadsForAllTeams(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+ teamId2 := model.NewId()
+
+ uid := model.NewId()
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: uid}
+ m2 := &model.TeamMember{TeamId: teamId2, UserId: uid}
+ store.Must(ss.Team().SaveMember(m1))
+ store.Must(ss.Team().SaveMember(m2))
+
+ c1 := &model.Channel{TeamId: m1.TeamId, Name: model.NewId(), DisplayName: "Town Square", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c1))
+ c2 := &model.Channel{TeamId: m2.TeamId, Name: model.NewId(), DisplayName: "Town Square", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c2))
+
+ cm1 := &model.ChannelMember{ChannelId: c1.Id, UserId: m1.UserId, NotifyProps: model.GetDefaultChannelNotifyProps(), MsgCount: 90}
+ store.Must(ss.Channel().SaveMember(cm1))
+ cm2 := &model.ChannelMember{ChannelId: c2.Id, UserId: m2.UserId, NotifyProps: model.GetDefaultChannelNotifyProps(), MsgCount: 90}
+ store.Must(ss.Channel().SaveMember(cm2))
+
+ if r1 := <-ss.Team().GetChannelUnreadsForAllTeams("", uid); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.ChannelUnread)
+ membersMap := make(map[string]bool)
+ for i := range ms {
+ id := ms[i].TeamId
+ if _, ok := membersMap[id]; !ok {
+ membersMap[id] = true
+ }
+ }
+ if len(membersMap) != 2 {
+ t.Fatal("Should be the unreads for all the teams")
+ }
+
+ if ms[0].MsgCount != 10 {
+ t.Fatal("subtraction failed")
+ }
+ }
+
+ if r2 := <-ss.Team().GetChannelUnreadsForAllTeams(teamId1, uid); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ ms := r2.Data.([]*model.ChannelUnread)
+ membersMap := make(map[string]bool)
+ for i := range ms {
+ id := ms[i].TeamId
+ if _, ok := membersMap[id]; !ok {
+ membersMap[id] = true
+ }
+ }
+
+ if len(membersMap) != 1 {
+ t.Fatal("Should be the unreads for just one team")
+ }
+
+ if ms[0].MsgCount != 10 {
+ t.Fatal("subtraction failed")
+ }
+ }
+
+ if r1 := <-ss.Team().RemoveAllMembersByUser(uid); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+}
+
+func TestGetChannelUnreadsForTeam(t *testing.T) {
+ ss := Setup()
+
+ teamId1 := model.NewId()
+
+ uid := model.NewId()
+ m1 := &model.TeamMember{TeamId: teamId1, UserId: uid}
+ store.Must(ss.Team().SaveMember(m1))
+
+ c1 := &model.Channel{TeamId: m1.TeamId, Name: model.NewId(), DisplayName: "Town Square", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c1))
+ c2 := &model.Channel{TeamId: m1.TeamId, Name: model.NewId(), DisplayName: "Town Square", Type: model.CHANNEL_OPEN, TotalMsgCount: 100}
+ store.Must(ss.Channel().Save(c2))
+
+ cm1 := &model.ChannelMember{ChannelId: c1.Id, UserId: m1.UserId, NotifyProps: model.GetDefaultChannelNotifyProps(), MsgCount: 90}
+ store.Must(ss.Channel().SaveMember(cm1))
+ cm2 := &model.ChannelMember{ChannelId: c2.Id, UserId: m1.UserId, NotifyProps: model.GetDefaultChannelNotifyProps(), MsgCount: 90}
+ store.Must(ss.Channel().SaveMember(cm2))
+
+ if r1 := <-ss.Team().GetChannelUnreadsForTeam(m1.TeamId, m1.UserId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ ms := r1.Data.([]*model.ChannelUnread)
+ if len(ms) != 2 {
+ t.Fatal("wrong length")
+ }
+
+ if ms[0].MsgCount != 10 {
+ t.Fatal("subtraction failed")
+ }
+ }
+}
diff --git a/store/sqlstore/tokens_store.go b/store/sqlstore/tokens_store.go
new file mode 100644
index 000000000..31e39f1fe
--- /dev/null
+++ b/store/sqlstore/tokens_store.go
@@ -0,0 +1,110 @@
+// Copyright (c) 2017 Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+
+ l4g "github.com/alecthomas/log4go"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+type SqlTokenStore struct {
+ SqlStore
+}
+
+func NewSqlTokenStore(sqlStore SqlStore) store.TokenStore {
+ s := &SqlTokenStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Token{}, "Tokens").SetKeys(false, "Token")
+ table.ColMap("Token").SetMaxSize(64)
+ table.ColMap("Type").SetMaxSize(64)
+ table.ColMap("Extra").SetMaxSize(128)
+ }
+
+ return s
+}
+
+func (s SqlTokenStore) CreateIndexesIfNotExists() {
+}
+
+func (s SqlTokenStore) Save(token *model.Token) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if result.Err = token.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(token); err != nil {
+ result.Err = model.NewAppError("SqlTokenStore.Save", "store.sql_recover.save.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTokenStore) Delete(token string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := s.GetMaster().Exec("DELETE FROM Tokens WHERE Token = :Token", map[string]interface{}{"Token": token}); err != nil {
+ result.Err = model.NewAppError("SqlTokenStore.Delete", "store.sql_recover.delete.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlTokenStore) GetByToken(tokenString string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ token := model.Token{}
+
+ if err := s.GetReplica().SelectOne(&token, "SELECT * FROM Tokens WHERE Token = :Token", map[string]interface{}{"Token": tokenString}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlTokenStore.GetByToken", "store.sql_recover.get_by_code.app_error", nil, err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlTokenStore.GetByToken", "store.sql_recover.get_by_code.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ result.Data = &token
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
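+// Cleanup removes tokens created more than MAX_TOKEN_EXIPRY_TIME milliseconds
+// ago, based on their CreateAt timestamps.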
+func (s SqlTokenStore) Cleanup() {
+ l4g.Debug("Cleaning up token store.")
+ deltime := model.GetMillis() - model.MAX_TOKEN_EXIPRY_TIME
+ if _, err := s.GetMaster().Exec("DELETE FROM Tokens WHERE CreateAt < :DelTime", map[string]interface{}{"DelTime": deltime}); err != nil {
+ l4g.Error("Unable to cleanup token store.")
+ }
+}
diff --git a/store/sqlstore/upgrade.go b/store/sqlstore/upgrade.go
new file mode 100644
index 000000000..1973536f6
--- /dev/null
+++ b/store/sqlstore/upgrade.go
@@ -0,0 +1,310 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "os"
+ "strings"
+ "time"
+
+ l4g "github.com/alecthomas/log4go"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ VERSION_4_3_0 = "4.3.0"
+ VERSION_4_2_0 = "4.2.0"
+ VERSION_4_1_0 = "4.1.0"
+ VERSION_4_0_0 = "4.0.0"
+ VERSION_3_10_0 = "3.10.0"
+ VERSION_3_9_0 = "3.9.0"
+ VERSION_3_8_0 = "3.8.0"
+ VERSION_3_7_0 = "3.7.0"
+ VERSION_3_6_0 = "3.6.0"
+ VERSION_3_5_0 = "3.5.0"
+ VERSION_3_4_0 = "3.4.0"
+ VERSION_3_3_0 = "3.3.0"
+ VERSION_3_2_0 = "3.2.0"
+ VERSION_3_1_0 = "3.1.0"
+ VERSION_3_0_0 = "3.0.0"
+ OLDEST_SUPPORTED_VERSION = VERSION_3_0_0
+)
+
+const (
+ EXIT_VERSION_SAVE_MISSING = 1001
+ EXIT_TOO_OLD = 1002
+ EXIT_VERSION_SAVE = 1003
+ EXIT_THEME_MIGRATION = 1004
+)
+
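+// UpgradeDatabase applies each per-version migration in order; every step is a
+// no-op unless the stored schema version matches the version it upgrades from.
+// An empty schema version is seeded with model.CurrentVersion, and a schema
+// that is still not current after the migrations is treated as too old and the
+// process exits.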
+func UpgradeDatabase(sqlStore SqlStore) {
+
+ UpgradeDatabaseToVersion31(sqlStore)
+ UpgradeDatabaseToVersion32(sqlStore)
+ UpgradeDatabaseToVersion33(sqlStore)
+ UpgradeDatabaseToVersion34(sqlStore)
+ UpgradeDatabaseToVersion35(sqlStore)
+ UpgradeDatabaseToVersion36(sqlStore)
+ UpgradeDatabaseToVersion37(sqlStore)
+ UpgradeDatabaseToVersion38(sqlStore)
+ UpgradeDatabaseToVersion39(sqlStore)
+ UpgradeDatabaseToVersion310(sqlStore)
+ UpgradeDatabaseToVersion40(sqlStore)
+ UpgradeDatabaseToVersion41(sqlStore)
+ UpgradeDatabaseToVersion42(sqlStore)
+
+ // If the SchemaVersion is empty this is the first time it has run,
+ // so let's set it to the current version.
+ if sqlStore.GetCurrentSchemaVersion() == "" {
+ if result := <-sqlStore.System().SaveOrUpdate(&model.System{Name: "Version", Value: model.CurrentVersion}); result.Err != nil {
+ l4g.Critical(result.Err.Error())
+ time.Sleep(time.Second)
+ os.Exit(EXIT_VERSION_SAVE_MISSING)
+ }
+
+ l4g.Info(utils.T("store.sql.schema_set.info"), model.CurrentVersion)
+ }
+
+ // If we're not on the current version then it's too old to be upgraded
+ if sqlStore.GetCurrentSchemaVersion() != model.CurrentVersion {
+ l4g.Critical(utils.T("store.sql.schema_version.critical"), sqlStore.GetCurrentSchemaVersion(), OLDEST_SUPPORTED_VERSION, model.CurrentVersion, OLDEST_SUPPORTED_VERSION)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_TOO_OLD)
+ }
+}
+
+func saveSchemaVersion(sqlStore SqlStore, version string) {
+ if result := <-sqlStore.System().Update(&model.System{Name: "Version", Value: version}); result.Err != nil {
+ l4g.Critical(result.Err.Error())
+ time.Sleep(time.Second)
+ os.Exit(EXIT_VERSION_SAVE)
+ }
+
+ l4g.Warn(utils.T("store.sql.upgraded.warn"), version)
+}
+
+func shouldPerformUpgrade(sqlStore SqlStore, currentSchemaVersion string, expectedSchemaVersion string) bool {
+ if sqlStore.GetCurrentSchemaVersion() == currentSchemaVersion {
+ l4g.Warn(utils.T("store.sql.schema_out_of_date.warn"), currentSchemaVersion)
+ l4g.Warn(utils.T("store.sql.schema_upgrade_attempt.warn"), expectedSchemaVersion)
+
+ return true
+ }
+
+ return false
+}
+
+func UpgradeDatabaseToVersion31(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_0_0, VERSION_3_1_0) {
+ sqlStore.CreateColumnIfNotExists("OutgoingWebhooks", "ContentType", "varchar(128)", "varchar(128)", "")
+ saveSchemaVersion(sqlStore, VERSION_3_1_0)
+ }
+}
+
+func UpgradeDatabaseToVersion32(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_1_0, VERSION_3_2_0) {
+ sqlStore.CreateColumnIfNotExists("TeamMembers", "DeleteAt", "bigint(20)", "bigint", "0")
+
+ saveSchemaVersion(sqlStore, VERSION_3_2_0)
+ }
+}
+
+func themeMigrationFailed(err error) {
+ l4g.Critical(utils.T("store.sql_user.migrate_theme.critical"), err)
+ time.Sleep(time.Second)
+ os.Exit(EXIT_THEME_MIGRATION)
+}
+
+func UpgradeDatabaseToVersion33(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_2_0, VERSION_3_3_0) {
+ if sqlStore.DoesColumnExist("Users", "ThemeProps") {
+ params := map[string]interface{}{
+ "Category": model.PREFERENCE_CATEGORY_THEME,
+ "Name": "",
+ }
+
+ transaction, err := sqlStore.GetMaster().Begin()
+ if err != nil {
+ themeMigrationFailed(err)
+ }
+
+ // increase size of Value column of Preferences table to match the size of the ThemeProps column
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ if _, err := transaction.Exec("ALTER TABLE Preferences ALTER COLUMN Value TYPE varchar(2000)"); err != nil {
+ themeMigrationFailed(err)
+ }
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ if _, err := transaction.Exec("ALTER TABLE Preferences MODIFY Value text"); err != nil {
+ themeMigrationFailed(err)
+ }
+ }
+
+ // copy data across
+ if _, err := transaction.Exec(
+ `INSERT INTO
+ Preferences(UserId, Category, Name, Value)
+ SELECT
+ Id, '`+model.PREFERENCE_CATEGORY_THEME+`', '', ThemeProps
+ FROM
+ Users
+ WHERE
+ Users.ThemeProps != 'null'`, params); err != nil {
+ themeMigrationFailed(err)
+ }
+
+ // delete old data
+ if _, err := transaction.Exec("ALTER TABLE Users DROP COLUMN ThemeProps"); err != nil {
+ themeMigrationFailed(err)
+ }
+
+ if err := transaction.Commit(); err != nil {
+ themeMigrationFailed(err)
+ }
+
+ // rename solarized_* code themes to solarized-* to match client changes in 3.0
+ var data model.Preferences
+ if _, err := sqlStore.GetMaster().Select(&data, "SELECT * FROM Preferences WHERE Category = '"+model.PREFERENCE_CATEGORY_THEME+"' AND Value LIKE '%solarized_%'"); err == nil {
+ for i := range data {
+ data[i].Value = strings.Replace(data[i].Value, "solarized_", "solarized-", -1)
+ }
+
+ sqlStore.Preference().Save(&data)
+ }
+ }
+
+ sqlStore.CreateColumnIfNotExists("OAuthApps", "IsTrusted", "tinyint(1)", "boolean", "0")
+ sqlStore.CreateColumnIfNotExists("OAuthApps", "IconURL", "varchar(512)", "varchar(512)", "")
+ sqlStore.CreateColumnIfNotExists("OAuthAccessData", "ClientId", "varchar(26)", "varchar(26)", "")
+ sqlStore.CreateColumnIfNotExists("OAuthAccessData", "UserId", "varchar(26)", "varchar(26)", "")
+ sqlStore.CreateColumnIfNotExists("OAuthAccessData", "ExpiresAt", "bigint", "bigint", "0")
+
+ if sqlStore.DoesColumnExist("OAuthAccessData", "AuthCode") {
+ sqlStore.RemoveIndexIfExists("idx_oauthaccessdata_auth_code", "OAuthAccessData")
+ sqlStore.RemoveColumnIfExists("OAuthAccessData", "AuthCode")
+ }
+
+ sqlStore.RemoveColumnIfExists("Users", "LastActivityAt")
+ sqlStore.RemoveColumnIfExists("Users", "LastPingAt")
+
+ sqlStore.CreateColumnIfNotExists("OutgoingWebhooks", "TriggerWhen", "tinyint", "integer", "0")
+
+ saveSchemaVersion(sqlStore, VERSION_3_3_0)
+ }
+}
+
+func UpgradeDatabaseToVersion34(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_3_0, VERSION_3_4_0) {
+ sqlStore.CreateColumnIfNotExists("Status", "Manual", "BOOLEAN", "BOOLEAN", "0")
+ sqlStore.CreateColumnIfNotExists("Status", "ActiveChannel", "varchar(26)", "varchar(26)", "")
+
+ saveSchemaVersion(sqlStore, VERSION_3_4_0)
+ }
+}
+
+func UpgradeDatabaseToVersion35(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_4_0, VERSION_3_5_0) {
+ sqlStore.GetMaster().Exec("UPDATE Users SET Roles = 'system_user' WHERE Roles = ''")
+ sqlStore.GetMaster().Exec("UPDATE Users SET Roles = 'system_user system_admin' WHERE Roles = 'system_admin'")
+ sqlStore.GetMaster().Exec("UPDATE TeamMembers SET Roles = 'team_user' WHERE Roles = ''")
+ sqlStore.GetMaster().Exec("UPDATE TeamMembers SET Roles = 'team_user team_admin' WHERE Roles = 'admin'")
+ sqlStore.GetMaster().Exec("UPDATE ChannelMembers SET Roles = 'channel_user' WHERE Roles = ''")
+ sqlStore.GetMaster().Exec("UPDATE ChannelMembers SET Roles = 'channel_user channel_admin' WHERE Roles = 'admin'")
+
+ // The rest of the migration from Filenames -> FileIds is done lazily in api.GetFileInfosForPost
+ sqlStore.CreateColumnIfNotExists("Posts", "FileIds", "varchar(150)", "varchar(150)", "[]")
+
+ // Increase maximum length of the Channel table Purpose column.
+ if sqlStore.GetMaxLengthOfColumnIfExists("Channels", "Purpose") != "250" {
+ sqlStore.AlterColumnTypeIfExists("Channels", "Purpose", "varchar(250)", "varchar(250)")
+ }
+
+ sqlStore.Session().RemoveAllSessions()
+
+ saveSchemaVersion(sqlStore, VERSION_3_5_0)
+ }
+}
+
+func UpgradeDatabaseToVersion36(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_5_0, VERSION_3_6_0) {
+ sqlStore.CreateColumnIfNotExists("Posts", "HasReactions", "tinyint", "boolean", "0")
+
+ // Create Team Description column
+ sqlStore.CreateColumnIfNotExists("Teams", "Description", "varchar(255)", "varchar(255)", "")
+
+ // Add a Position column to users.
+ sqlStore.CreateColumnIfNotExists("Users", "Position", "varchar(64)", "varchar(64)", "")
+
+ // Remove ActiveChannel column from Status
+ sqlStore.RemoveColumnIfExists("Status", "ActiveChannel")
+
+ saveSchemaVersion(sqlStore, VERSION_3_6_0)
+ }
+}
+
+func UpgradeDatabaseToVersion37(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_6_0, VERSION_3_7_0) {
+ // Add EditAt column to Posts
+ sqlStore.CreateColumnIfNotExists("Posts", "EditAt", " bigint", " bigint", "0")
+
+ saveSchemaVersion(sqlStore, VERSION_3_7_0)
+ }
+}
+
+func UpgradeDatabaseToVersion38(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_7_0, VERSION_3_8_0) {
+ // Add the IsPinned column to posts.
+ sqlStore.CreateColumnIfNotExists("Posts", "IsPinned", "boolean", "boolean", "0")
+
+ saveSchemaVersion(sqlStore, VERSION_3_8_0)
+ }
+}
+
+func UpgradeDatabaseToVersion39(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_8_0, VERSION_3_9_0) {
+ sqlStore.CreateColumnIfNotExists("OAuthAccessData", "Scope", "varchar(128)", "varchar(128)", model.DEFAULT_SCOPE)
+ sqlStore.RemoveTableIfExists("PasswordRecovery")
+
+ saveSchemaVersion(sqlStore, VERSION_3_9_0)
+ }
+}
+
+func UpgradeDatabaseToVersion310(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_9_0, VERSION_3_10_0) {
+ saveSchemaVersion(sqlStore, VERSION_3_10_0)
+ }
+}
+
+func UpgradeDatabaseToVersion40(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_3_10_0, VERSION_4_0_0) {
+ saveSchemaVersion(sqlStore, VERSION_4_0_0)
+ }
+}
+
+func UpgradeDatabaseToVersion41(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_4_0_0, VERSION_4_1_0) {
+ // Increase maximum length of the Users table Roles column.
+ if sqlStore.GetMaxLengthOfColumnIfExists("Users", "Roles") != "256" {
+ sqlStore.AlterColumnTypeIfExists("Users", "Roles", "varchar(256)", "varchar(256)")
+ }
+
+ sqlStore.RemoveTableIfExists("JobStatuses")
+
+ saveSchemaVersion(sqlStore, VERSION_4_1_0)
+ }
+}
+
+func UpgradeDatabaseToVersion42(sqlStore SqlStore) {
+ if shouldPerformUpgrade(sqlStore, VERSION_4_1_0, VERSION_4_2_0) {
+ saveSchemaVersion(sqlStore, VERSION_4_2_0)
+ }
+}
+
+func UpgradeDatabaseToVersion43(sqlStore SqlStore) {
+ // TODO: Uncomment following condition when version 4.3.0 is released
+ //if shouldPerformUpgrade(sqlStore, VERSION_4_2_0, VERSION_4_3_0) {
+ // saveSchemaVersion(sqlStore, VERSION_4_3_0)
+ //}
+}
diff --git a/store/sqlstore/upgrade_test.go b/store/sqlstore/upgrade_test.go
new file mode 100644
index 000000000..d8d6a9e9d
--- /dev/null
+++ b/store/sqlstore/upgrade_test.go
@@ -0,0 +1,41 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+func TestStoreUpgrade(t *testing.T) {
+ ss := Setup()
+
+ saveSchemaVersion(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore), VERSION_3_0_0)
+ UpgradeDatabase(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore))
+
+ saveSchemaVersion(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore), "")
+ UpgradeDatabase(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore))
+}
+
+func TestSaveSchemaVersion(t *testing.T) {
+ ss := Setup()
+
+ saveSchemaVersion(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore), VERSION_3_0_0)
+ if result := <-ss.System().Get(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ props := result.Data.(model.StringMap)
+ if props["Version"] != VERSION_3_0_0 {
+ t.Fatal("version not updated")
+ }
+ }
+
+ if ss.(*store.LayeredStore).DatabaseLayer.(SqlStore).GetCurrentSchemaVersion() != VERSION_3_0_0 {
+ t.Fatal("version not updated")
+ }
+
+ saveSchemaVersion(ss.(*store.LayeredStore).DatabaseLayer.(SqlStore), model.CurrentVersion)
+}
diff --git a/store/sqlstore/user_access_token_store.go b/store/sqlstore/user_access_token_store.go
new file mode 100644
index 000000000..5186ead41
--- /dev/null
+++ b/store/sqlstore/user_access_token_store.go
@@ -0,0 +1,263 @@
+// Copyright (c) 2017 Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "net/http"
+
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlUserAccessTokenStore struct {
+ SqlStore
+}
+
+func NewSqlUserAccessTokenStore(sqlStore SqlStore) store.UserAccessTokenStore {
+ s := &SqlUserAccessTokenStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.UserAccessToken{}, "UserAccessTokens").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Token").SetMaxSize(26).SetUnique(true)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("Description").SetMaxSize(512)
+ }
+
+ return s
+}
+
+func (s SqlUserAccessTokenStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_user_access_tokens_token", "UserAccessTokens", "Token")
+ s.CreateIndexIfNotExists("idx_user_access_tokens_user_id", "UserAccessTokens", "UserId")
+}
+
+func (s SqlUserAccessTokenStore) Save(token *model.UserAccessToken) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ token.PreSave()
+
+ if result.Err = token.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(token); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Save", "store.sql_user_access_token.save.app_error", nil, "", http.StatusInternalServerError)
+ } else {
+ result.Data = token
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserAccessTokenStore) Delete(tokenId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ transaction, err := s.GetMaster().Begin()
+ if err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Delete", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ if extrasResult := s.deleteSessionsAndTokensById(transaction, tokenId); extrasResult.Err != nil {
+ result = extrasResult
+ }
+
+ if result.Err == nil {
+ if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Delete", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ if err := transaction.Rollback(); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Delete", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserAccessTokenStore) deleteSessionsAndTokensById(transaction *gorp.Transaction, tokenId string) store.StoreResult {
+ result := store.StoreResult{}
+
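+		// Sessions created with a personal access token reuse its Token value, so those
+		// sessions are removed together with the token; the multi-table DELETE syntax
+		// differs between Postgres and MySQL.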
+ query := ""
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "DELETE FROM Sessions s USING UserAccessTokens o WHERE o.Token = s.Token AND o.Id = :Id"
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ query = "DELETE s.* FROM Sessions s INNER JOIN UserAccessTokens o ON o.Token = s.Token WHERE o.Id = :Id"
+ }
+
+ if _, err := transaction.Exec(query, map[string]interface{}{"Id": tokenId}); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.deleteSessionsById", "store.sql_user_access_token.delete.app_error", nil, "id="+tokenId+", err="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return s.deleteTokensById(transaction, tokenId)
+}
+
+func (s SqlUserAccessTokenStore) deleteTokensById(transaction *gorp.Transaction, tokenId string) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Exec("DELETE FROM UserAccessTokens WHERE Id = :Id", map[string]interface{}{"Id": tokenId}); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.deleteTokensById", "store.sql_user_access_token.delete.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ return result
+}
+
+func (s SqlUserAccessTokenStore) DeleteAllForUser(userId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ transaction, err := s.GetMaster().Begin()
+ if err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.DeleteAllForUser", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+			if extrasResult := s.deleteSessionsAndTokensByUser(transaction, userId); extrasResult.Err != nil {
+ result = extrasResult
+ }
+
+ if result.Err == nil {
+ if err := transaction.Commit(); err != nil {
+ // don't need to rollback here since the transaction is already closed
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.DeleteAllForUser", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ if err := transaction.Rollback(); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.DeleteAllForUser", "store.sql_user_access_token.delete.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserAccessTokenStore) deleteSessionsAndTokensByUser(transaction *gorp.Transaction, userId string) store.StoreResult {
+ result := store.StoreResult{}
+
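+	// Same approach as deleteSessionsAndTokensById, but keyed by the owning UserId.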
+ query := ""
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ query = "DELETE FROM Sessions s USING UserAccessTokens o WHERE o.Token = s.Token AND o.UserId = :UserId"
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ query = "DELETE s.* FROM Sessions s INNER JOIN UserAccessTokens o ON o.Token = s.Token WHERE o.UserId = :UserId"
+ }
+
+ if _, err := transaction.Exec(query, map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.deleteSessionsByUser", "store.sql_user_access_token.delete.app_error", nil, "user_id="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ return result
+ }
+
+ return s.deleteTokensByUser(transaction, userId)
+}
+
+func (s SqlUserAccessTokenStore) deleteTokensByUser(transaction *gorp.Transaction, userId string) store.StoreResult {
+ result := store.StoreResult{}
+
+ if _, err := transaction.Exec("DELETE FROM UserAccessTokens WHERE UserId = :UserId", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.deleteTokensByUser", "store.sql_user_access_token.delete.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ return result
+}
+
+func (s SqlUserAccessTokenStore) Get(tokenId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ token := model.UserAccessToken{}
+
+ if err := s.GetReplica().SelectOne(&token, "SELECT * FROM UserAccessTokens WHERE Id = :Id", map[string]interface{}{"Id": tokenId}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Get", "store.sql_user_access_token.get.app_error", nil, err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.Get", "store.sql_user_access_token.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ result.Data = &token
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserAccessTokenStore) GetByToken(tokenString string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ token := model.UserAccessToken{}
+
+ if err := s.GetReplica().SelectOne(&token, "SELECT * FROM UserAccessTokens WHERE Token = :Token", map[string]interface{}{"Token": tokenString}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.GetByToken", "store.sql_user_access_token.get_by_token.app_error", nil, err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.GetByToken", "store.sql_user_access_token.get_by_token.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ result.Data = &token
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserAccessTokenStore) GetByUser(userId string, offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ tokens := []*model.UserAccessToken{}
+
+ if _, err := s.GetReplica().Select(&tokens, "SELECT * FROM UserAccessTokens WHERE UserId = :UserId LIMIT :Limit OFFSET :Offset", map[string]interface{}{"UserId": userId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserAccessTokenStore.GetByUser", "store.sql_user_access_token.get_by_user.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = tokens
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/user_access_token_store_test.go b/store/sqlstore/user_access_token_store_test.go
new file mode 100644
index 000000000..e160ddfa1
--- /dev/null
+++ b/store/sqlstore/user_access_token_store_test.go
@@ -0,0 +1,87 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+
+ "github.com/mattermost/mattermost-server/model"
+"github.com/mattermost/mattermost-server/store"
+)
+
+func TestUserAccessTokenSaveGetDelete(t *testing.T) {
+ ss := Setup()
+
+ uat := &model.UserAccessToken{
+ Token: model.NewId(),
+ UserId: model.NewId(),
+ Description: "testtoken",
+ }
+
+ s1 := model.Session{}
+ s1.UserId = uat.UserId
+ s1.Token = uat.Token
+
+ store.Must(ss.Session().Save(&s1))
+
+ if result := <-ss.UserAccessToken().Save(uat); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.UserAccessToken().Get(uat.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.(*model.UserAccessToken); received.Token != uat.Token {
+ t.Fatal("received incorrect token after save")
+ }
+
+ if result := <-ss.UserAccessToken().GetByToken(uat.Token); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.(*model.UserAccessToken); received.Token != uat.Token {
+ t.Fatal("received incorrect token after save")
+ }
+
+ if result := <-ss.UserAccessToken().GetByToken("notarealtoken"); result.Err == nil {
+ t.Fatal("should have failed on bad token")
+ }
+
+ if result := <-ss.UserAccessToken().GetByUser(uat.UserId, 0, 100); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.UserAccessToken); len(received) != 1 {
+ t.Fatal("received incorrect number of tokens after save")
+ }
+
+ if result := <-ss.UserAccessToken().Delete(uat.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if err := (<-ss.Session().Get(s1.Token)).Err; err == nil {
+ t.Fatal("should error - session should be deleted")
+ }
+
+ if err := (<-ss.UserAccessToken().GetByToken(s1.Token)).Err; err == nil {
+ t.Fatal("should error - access token should be deleted")
+ }
+
+ s2 := model.Session{}
+ s2.UserId = uat.UserId
+ s2.Token = uat.Token
+
+ store.Must(ss.Session().Save(&s2))
+
+ if result := <-ss.UserAccessToken().Save(uat); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if result := <-ss.UserAccessToken().DeleteAllForUser(uat.UserId); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ if err := (<-ss.Session().Get(s2.Token)).Err; err == nil {
+ t.Fatal("should error - session should be deleted")
+ }
+
+ if err := (<-ss.UserAccessToken().GetByToken(s2.Token)).Err; err == nil {
+ t.Fatal("should error - access token should be deleted")
+ }
+}
diff --git a/store/sqlstore/user_store.go b/store/sqlstore/user_store.go
new file mode 100644
index 000000000..4f4380d79
--- /dev/null
+++ b/store/sqlstore/user_store.go
@@ -0,0 +1,1620 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "database/sql"
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+const (
+ PROFILES_IN_CHANNEL_CACHE_SIZE = model.CHANNEL_CACHE_SIZE
+ PROFILES_IN_CHANNEL_CACHE_SEC = 900 // 15 mins
+ PROFILE_BY_IDS_CACHE_SIZE = model.SESSION_CACHE_SIZE
+ PROFILE_BY_IDS_CACHE_SEC = 900 // 15 mins
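+	// Column lists used to create the full text indexes below and to build the
+	// MATCH/to_tsquery clauses in performSearch.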
+ USER_SEARCH_TYPE_NAMES_NO_FULL_NAME = "Username, Nickname"
+ USER_SEARCH_TYPE_NAMES = "Username, FirstName, LastName, Nickname"
+ USER_SEARCH_TYPE_ALL_NO_FULL_NAME = "Username, Nickname, Email"
+ USER_SEARCH_TYPE_ALL = "Username, FirstName, LastName, Nickname, Email"
+)
+
+type SqlUserStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
+var profilesInChannelCache *utils.Cache = utils.NewLru(PROFILES_IN_CHANNEL_CACHE_SIZE)
+var profileByIdsCache *utils.Cache = utils.NewLru(PROFILE_BY_IDS_CACHE_SIZE)
+
+func ClearUserCaches() {
+ profilesInChannelCache.Purge()
+ profileByIdsCache.Purge()
+}
+
+func (us SqlUserStore) InvalidatProfileCacheForUser(userId string) {
+ profileByIdsCache.Remove(userId)
+}
+
+func NewSqlUserStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.UserStore {
+ us := &SqlUserStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.User{}, "Users").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Username").SetMaxSize(64).SetUnique(true)
+ table.ColMap("Password").SetMaxSize(128)
+ table.ColMap("AuthData").SetMaxSize(128).SetUnique(true)
+ table.ColMap("AuthService").SetMaxSize(32)
+ table.ColMap("Email").SetMaxSize(128).SetUnique(true)
+ table.ColMap("Nickname").SetMaxSize(64)
+ table.ColMap("FirstName").SetMaxSize(64)
+ table.ColMap("LastName").SetMaxSize(64)
+ table.ColMap("Roles").SetMaxSize(256)
+ table.ColMap("Props").SetMaxSize(4000)
+ table.ColMap("NotifyProps").SetMaxSize(2000)
+ table.ColMap("Locale").SetMaxSize(5)
+ table.ColMap("MfaSecret").SetMaxSize(128)
+ table.ColMap("Position").SetMaxSize(64)
+ }
+
+ return us
+}
+
+func (us SqlUserStore) CreateIndexesIfNotExists() {
+ us.CreateIndexIfNotExists("idx_users_email", "Users", "Email")
+ us.CreateIndexIfNotExists("idx_users_update_at", "Users", "UpdateAt")
+ us.CreateIndexIfNotExists("idx_users_create_at", "Users", "CreateAt")
+ us.CreateIndexIfNotExists("idx_users_delete_at", "Users", "DeleteAt")
+
+ us.CreateFullTextIndexIfNotExists("idx_users_all_txt", "Users", USER_SEARCH_TYPE_ALL)
+ us.CreateFullTextIndexIfNotExists("idx_users_all_no_full_name_txt", "Users", USER_SEARCH_TYPE_ALL_NO_FULL_NAME)
+ us.CreateFullTextIndexIfNotExists("idx_users_names_txt", "Users", USER_SEARCH_TYPE_NAMES)
+ us.CreateFullTextIndexIfNotExists("idx_users_names_no_full_name_txt", "Users", USER_SEARCH_TYPE_NAMES_NO_FULL_NAME)
+}
+
+func (us SqlUserStore) Save(user *model.User) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(user.Id) > 0 {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.existing.app_error", nil, "user_id="+user.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ user.PreSave()
+ if result.Err = user.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := us.GetMaster().Insert(user); err != nil {
+ if IsUniqueConstraintError(err, []string{"Email", "users_email_key", "idx_users_email_unique"}) {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.email_exists.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusBadRequest)
+ } else if IsUniqueConstraintError(err, []string{"Username", "users_username_key", "idx_users_username_unique"}) {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.username_exists.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlUserStore.Save", "store.sql_user.save.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = user
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) Update(user *model.User, trustedUpdateData bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ user.PreUpdate()
+
+ if result.Err = user.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if oldUserResult, err := us.GetMaster().Get(model.User{}, user.Id); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.finding.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else if oldUserResult == nil {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.find.app_error", nil, "user_id="+user.Id, http.StatusBadRequest)
+ } else {
+ oldUser := oldUserResult.(*model.User)
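+			// Fields that a regular update must not change are carried over from the stored user.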
+ user.CreateAt = oldUser.CreateAt
+ user.AuthData = oldUser.AuthData
+ user.AuthService = oldUser.AuthService
+ user.Password = oldUser.Password
+ user.LastPasswordUpdate = oldUser.LastPasswordUpdate
+ user.LastPictureUpdate = oldUser.LastPictureUpdate
+ user.EmailVerified = oldUser.EmailVerified
+ user.FailedAttempts = oldUser.FailedAttempts
+ user.MfaSecret = oldUser.MfaSecret
+ user.MfaActive = oldUser.MfaActive
+
+ if !trustedUpdateData {
+ user.Roles = oldUser.Roles
+ user.DeleteAt = oldUser.DeleteAt
+ }
+
+ if user.IsOAuthUser() {
+ if !trustedUpdateData {
+ user.Email = oldUser.Email
+ }
+ } else if user.IsLDAPUser() && !trustedUpdateData {
+ if user.Username != oldUser.Username ||
+ user.Email != oldUser.Email {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.can_not_change_ldap.app_error", nil, "user_id="+user.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+ } else if user.Email != oldUser.Email {
+ user.EmailVerified = false
+ }
+
+ if user.Username != oldUser.Username {
+ user.UpdateMentionKeysFromUsername(oldUser.Username)
+ }
+
+ if count, err := us.GetMaster().Update(user); err != nil {
+ if IsUniqueConstraintError(err, []string{"Email", "users_email_key", "idx_users_email_unique"}) {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.email_taken.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusBadRequest)
+ } else if IsUniqueConstraintError(err, []string{"Username", "users_username_key", "idx_users_username_unique"}) {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.username_taken.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.updating.app_error", nil, "user_id="+user.Id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else if count != 1 {
+ result.Err = model.NewAppError("SqlUserStore.Update", "store.sql_user.update.app_error", nil, fmt.Sprintf("user_id=%v, count=%v", user.Id, count), http.StatusInternalServerError)
+ } else {
+ user.Sanitize(map[string]bool{})
+ oldUser.Sanitize(map[string]bool{})
+ result.Data = [2]*model.User{user, oldUser}
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateLastPictureUpdate(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ curTime := model.GetMillis()
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET LastPictureUpdate = :Time, UpdateAt = :Time WHERE Id = :UserId", map[string]interface{}{"Time": curTime, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdateUpdateAt", "store.sql_user.update_last_picture_update.app_error", nil, "user_id="+userId, http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateUpdateAt(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ curTime := model.GetMillis()
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET UpdateAt = :Time WHERE Id = :UserId", map[string]interface{}{"Time": curTime, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdateUpdateAt", "store.sql_user.update_update.app_error", nil, "user_id="+userId, http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdatePassword(userId, hashedPassword string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ updateAt := model.GetMillis()
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET Password = :Password, LastPasswordUpdate = :LastPasswordUpdate, UpdateAt = :UpdateAt, AuthData = NULL, AuthService = '', EmailVerified = true, FailedAttempts = 0 WHERE Id = :UserId", map[string]interface{}{"Password": hashedPassword, "LastPasswordUpdate": updateAt, "UpdateAt": updateAt, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdatePassword", "store.sql_user.update_password.app_error", nil, "id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateFailedPasswordAttempts(userId string, attempts int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET FailedAttempts = :FailedAttempts WHERE Id = :UserId", map[string]interface{}{"FailedAttempts": attempts, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdateFailedPasswordAttempts", "store.sql_user.update_failed_pwd_attempts.app_error", nil, "user_id="+userId, http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateAuthData(userId string, service string, authData *string, email string, resetMfa bool) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ email = strings.ToLower(email)
+
+ updateAt := model.GetMillis()
+
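+		// Switching a user to an external auth service clears the password and failed
+		// attempts; the email and MFA fields are only touched when requested.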
+ query := `
+ UPDATE
+ Users
+ SET
+ Password = '',
+ LastPasswordUpdate = :LastPasswordUpdate,
+ UpdateAt = :UpdateAt,
+ FailedAttempts = 0,
+ AuthService = :AuthService,
+ AuthData = :AuthData`
+
+ if len(email) != 0 {
+ query += ", Email = :Email"
+ }
+
+ if resetMfa {
+ query += ", MfaActive = false, MfaSecret = ''"
+ }
+
+ query += " WHERE Id = :UserId"
+
+ if _, err := us.GetMaster().Exec(query, map[string]interface{}{"LastPasswordUpdate": updateAt, "UpdateAt": updateAt, "UserId": userId, "AuthService": service, "AuthData": authData, "Email": email}); err != nil {
+ if IsUniqueConstraintError(err, []string{"Email", "users_email_key", "idx_users_email_unique", "AuthData", "users_authdata_key"}) {
+ result.Err = model.NewAppError("SqlUserStore.UpdateAuthData", "store.sql_user.update_auth_data.email_exists.app_error", map[string]interface{}{"Service": service, "Email": email}, "user_id="+userId+", "+err.Error(), http.StatusBadRequest)
+ } else {
+ result.Err = model.NewAppError("SqlUserStore.UpdateAuthData", "store.sql_user.update_auth_data.app_error", nil, "id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateMfaSecret(userId, secret string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ updateAt := model.GetMillis()
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET MfaSecret = :Secret, UpdateAt = :UpdateAt WHERE Id = :UserId", map[string]interface{}{"Secret": secret, "UpdateAt": updateAt, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdateMfaSecret", "store.sql_user.update_mfa_secret.app_error", nil, "id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) UpdateMfaActive(userId string, active bool) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ updateAt := model.GetMillis()
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET MfaActive = :Active, UpdateAt = :UpdateAt WHERE Id = :UserId", map[string]interface{}{"Active": active, "UpdateAt": updateAt, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.UpdateMfaActive", "store.sql_user.update_mfa_active.app_error", nil, "id="+userId+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = userId
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) Get(id string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if obj, err := us.GetReplica().Get(model.User{}, id); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.Get", "store.sql_user.get.app_error", nil, "user_id="+id+", "+err.Error(), http.StatusInternalServerError)
+ } else if obj == nil {
+ result.Err = model.NewAppError("SqlUserStore.Get", store.MISSING_ACCOUNT_ERROR, nil, "user_id="+id, http.StatusNotFound)
+ } else {
+ result.Data = obj.(*model.User)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetAll() store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var data []*model.User
+ if _, err := us.GetReplica().Select(&data, "SELECT * FROM Users"); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetAll", "store.sql_user.get.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserStore) GetEtagForAllProfiles() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
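+		// The etag combines the build version, the most recent UpdateAt (falling back to
+		// the current time on error) and the privacy settings, so it changes whenever any of them do.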
+ updateAt, err := s.GetReplica().SelectInt("SELECT UpdateAt FROM Users ORDER BY UpdateAt DESC LIMIT 1")
+ if err != nil {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, model.GetMillis(), utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ } else {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, updateAt, utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetAllProfiles(offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, "SELECT * FROM Users ORDER BY Username ASC LIMIT :Limit OFFSET :Offset", map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetAllProfiles", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlUserStore) GetEtagForProfiles(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ updateAt, err := s.GetReplica().SelectInt("SELECT UpdateAt FROM Users, TeamMembers WHERE TeamMembers.TeamId = :TeamId AND Users.Id = TeamMembers.UserId ORDER BY UpdateAt DESC LIMIT 1", map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, model.GetMillis(), utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ } else {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, updateAt, utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfiles(teamId string, offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, "SELECT Users.* FROM Users, TeamMembers WHERE TeamMembers.TeamId = :TeamId AND Users.Id = TeamMembers.UserId AND TeamMembers.DeleteAt = 0 ORDER BY Users.Username ASC LIMIT :Limit OFFSET :Offset", map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfiles", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) InvalidateProfilesInChannelCacheByUser(userId string) {
+ keys := profilesInChannelCache.Keys()
+
+ for _, key := range keys {
+ if cacheItem, ok := profilesInChannelCache.Get(key); ok {
+ userMap := cacheItem.(map[string]*model.User)
+ if _, userInCache := userMap[userId]; userInCache {
+ profilesInChannelCache.Remove(key)
+ }
+ }
+ }
+}
+
+func (us SqlUserStore) InvalidateProfilesInChannelCache(channelId string) {
+ profilesInChannelCache.Remove(channelId)
+}
+
+func (us SqlUserStore) GetProfilesInChannel(channelId string, offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ query := "SELECT Users.* FROM Users, ChannelMembers WHERE ChannelMembers.ChannelId = :ChannelId AND Users.Id = ChannelMembers.UserId ORDER BY Users.Username ASC LIMIT :Limit OFFSET :Offset"
+
+ if _, err := us.GetReplica().Select(&users, query, map[string]interface{}{"ChannelId": channelId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfilesInChannel", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetAllProfilesInChannel(channelId string, allowFromCache bool) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := profilesInChannelCache.Get(channelId); ok {
+ if us.metrics != nil {
+ us.metrics.IncrementMemCacheHitCounter("Profiles in Channel")
+ }
+ result.Data = cacheItem.(map[string]*model.User)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if us.metrics != nil {
+ us.metrics.IncrementMemCacheMissCounter("Profiles in Channel")
+ }
+ }
+ } else {
+ if us.metrics != nil {
+ us.metrics.IncrementMemCacheMissCounter("Profiles in Channel")
+ }
+ }
+
+ var users []*model.User
+
+ query := "SELECT Users.* FROM Users, ChannelMembers WHERE ChannelMembers.ChannelId = :ChannelId AND Users.Id = ChannelMembers.UserId AND Users.DeleteAt = 0"
+
+ if _, err := us.GetReplica().Select(&users, query, map[string]interface{}{"ChannelId": channelId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetAllProfilesInChannel", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ userMap := make(map[string]*model.User)
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ userMap[u.Id] = u
+ }
+
+ result.Data = userMap
+
+ if allowFromCache {
+ profilesInChannelCache.AddWithExpiresInSecs(channelId, userMap, PROFILES_IN_CHANNEL_CACHE_SEC)
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfilesNotInChannel(teamId string, channelId string, offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, `
+ SELECT
+ u.*
+ FROM Users u
+ INNER JOIN TeamMembers tm
+ ON tm.UserId = u.Id
+ AND tm.TeamId = :TeamId
+ AND tm.DeleteAt = 0
+ LEFT JOIN ChannelMembers cm
+ ON cm.UserId = u.Id
+ AND cm.ChannelId = :ChannelId
+ WHERE cm.UserId IS NULL
+ ORDER BY u.Username ASC
+ LIMIT :Limit OFFSET :Offset
+ `, map[string]interface{}{"TeamId": teamId, "ChannelId": channelId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfilesNotInChannel", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfilesWithoutTeam(offset int, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ query := `
+ SELECT
+ *
+ FROM
+ Users
+ WHERE
+ (SELECT
+ COUNT(0)
+ FROM
+ TeamMembers
+ WHERE
+ TeamMembers.UserId = Users.Id
+ AND TeamMembers.DeleteAt = 0) = 0
+ ORDER BY
+ Username ASC
+ LIMIT
+ :Limit
+ OFFSET
+ :Offset`
+
+ if _, err := us.GetReplica().Select(&users, query, map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfilesWithoutTeam", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfilesByUsernames(usernames []string, teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+ props := make(map[string]interface{})
+ idQuery := ""
+
+		for index, username := range usernames {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["username"+strconv.Itoa(index)] = usernames
+ idQuery += ":username" + strconv.Itoa(index)
+ }
+
+ var query string
+ if teamId == "" {
+ query = `SELECT * FROM Users WHERE Username IN (` + idQuery + `)`
+ } else {
+ query = `SELECT Users.* FROM Users INNER JOIN TeamMembers ON
+ Users.Id = TeamMembers.UserId AND Users.Username IN (` + idQuery + `) AND TeamMembers.TeamId = :TeamId `
+ props["TeamId"] = teamId
+ }
+
+ if _, err := us.GetReplica().Select(&users, query, props); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfilesByUsernames", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+type UserWithLastActivityAt struct {
+ model.User
+ LastActivityAt int64
+}
+
+func (us SqlUserStore) GetRecentlyActiveUsersForTeam(teamId string, offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*UserWithLastActivityAt
+
+ if _, err := us.GetReplica().Select(&users, `
+ SELECT
+ u.*,
+ s.LastActivityAt
+ FROM Users AS u
+ INNER JOIN TeamMembers AS t ON u.Id = t.UserId
+ INNER JOIN Status AS s ON s.UserId = t.UserId
+ WHERE t.TeamId = :TeamId
+ ORDER BY s.LastActivityAt DESC
+ LIMIT :Limit OFFSET :Offset
+ `, map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetRecentlyActiveUsers", "store.sql_user.get_recently_active_users.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ userList := []*model.User{}
+
+ for _, userWithLastActivityAt := range users {
+ u := userWithLastActivityAt.User
+ u.Sanitize(map[string]bool{})
+ u.LastActivityAt = userWithLastActivityAt.LastActivityAt
+ userList = append(userList, &u)
+ }
+
+ result.Data = userList
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetNewUsersForTeam(teamId string, offset, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, `
+ SELECT
+ u.*
+ FROM Users AS u
+ INNER JOIN TeamMembers AS t ON u.Id = t.UserId
+ WHERE t.TeamId = :TeamId
+ ORDER BY u.CreateAt DESC
+ LIMIT :Limit OFFSET :Offset
+ `, map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetNewUsersForTeam", "store.sql_user.get_new_users.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfileByIds(userIds []string, allowFromCache bool) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ users := []*model.User{}
+ props := make(map[string]interface{})
+ idQuery := ""
+ remainingUserIds := make([]string, 0)
+
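+		// Serve whatever is already cached and only query the database for the remaining ids.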
+ if allowFromCache {
+ for _, userId := range userIds {
+ if cacheItem, ok := profileByIdsCache.Get(userId); ok {
+ u := &model.User{}
+ *u = *cacheItem.(*model.User)
+ users = append(users, u)
+ } else {
+ remainingUserIds = append(remainingUserIds, userId)
+ }
+ }
+ if us.metrics != nil {
+ us.metrics.AddMemCacheHitCounter("Profile By Ids", float64(len(users)))
+ us.metrics.AddMemCacheMissCounter("Profile By Ids", float64(len(remainingUserIds)))
+ }
+ } else {
+ remainingUserIds = userIds
+ if us.metrics != nil {
+ us.metrics.AddMemCacheMissCounter("Profile By Ids", float64(len(remainingUserIds)))
+ }
+ }
+
+ // If everything came from the cache then just return
+ if len(remainingUserIds) == 0 {
+ result.Data = users
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ for index, userId := range remainingUserIds {
+ if len(idQuery) > 0 {
+ idQuery += ", "
+ }
+
+ props["userId"+strconv.Itoa(index)] = userId
+ idQuery += ":userId" + strconv.Itoa(index)
+ }
+
+ if _, err := us.GetReplica().Select(&users, "SELECT * FROM Users WHERE Users.Id IN ("+idQuery+")", props); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfileByIds", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+
+ cpy := &model.User{}
+ *cpy = *u
+ profileByIdsCache.AddWithExpiresInSecs(cpy.Id, cpy, PROFILE_BY_IDS_CACHE_SEC)
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetSystemAdminProfiles() store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, "SELECT * FROM Users WHERE Roles LIKE :Roles", map[string]interface{}{"Roles": "%system_admin%"}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetSystemAdminProfiles", "store.sql_user.get_sysadmin_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ userMap := make(map[string]*model.User)
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ userMap[u.Id] = u
+ }
+
+ result.Data = userMap
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetByEmail(email string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ email = strings.ToLower(email)
+
+ user := model.User{}
+
+ if err := us.GetReplica().SelectOne(&user, "SELECT * FROM Users WHERE Email = :Email", map[string]interface{}{"Email": email}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetByEmail", store.MISSING_ACCOUNT_ERROR, nil, "email="+email+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = &user
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetByAuth(authData *string, authService string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if authData == nil || *authData == "" {
+ result.Err = model.NewAppError("SqlUserStore.GetByAuth", store.MISSING_AUTH_ACCOUNT_ERROR, nil, "authData='', authService="+authService, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ user := model.User{}
+
+ if err := us.GetReplica().SelectOne(&user, "SELECT * FROM Users WHERE AuthData = :AuthData AND AuthService = :AuthService", map[string]interface{}{"AuthData": authData, "AuthService": authService}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlUserStore.GetByAuth", store.MISSING_AUTH_ACCOUNT_ERROR, nil, "authData="+*authData+", authService="+authService+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Err = model.NewAppError("SqlUserStore.GetByAuth", "store.sql_user.get_by_auth.other.app_error", nil, "authData="+*authData+", authService="+authService+", "+err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ result.Data = &user
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetAllUsingAuthService(authService string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+ var data []*model.User
+
+ if _, err := us.GetReplica().Select(&data, "SELECT * FROM Users WHERE AuthService = :AuthService", map[string]interface{}{"AuthService": authService}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetByAuth", "store.sql_user.get_by_auth.other.app_error", nil, "authService="+authService+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = data
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetByUsername(username string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ user := model.User{}
+
+ if err := us.GetReplica().SelectOne(&user, "SELECT * FROM Users WHERE Username = :Username", map[string]interface{}{"Username": username}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetByUsername", "store.sql_user.get_by_username.app_error", nil, err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = &user
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetForLogin(loginId string, allowSignInWithUsername, allowSignInWithEmail, ldapEnabled bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ params := map[string]interface{}{
+ "LoginId": loginId,
+ "AllowSignInWithUsername": allowSignInWithUsername,
+ "AllowSignInWithEmail": allowSignInWithEmail,
+ "LdapEnabled": ldapEnabled,
+ }
+
+ users := []*model.User{}
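+		// A single query covers username, email and LDAP logins; the boolean
+		// parameters switch the individual clauses on or off.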
+ if _, err := us.GetReplica().Select(
+ &users,
+ `SELECT
+ *
+ FROM
+ Users
+ WHERE
+ (:AllowSignInWithUsername AND Username = :LoginId)
+ OR (:AllowSignInWithEmail AND Email = :LoginId)
+ OR (:LdapEnabled AND AuthService = '`+model.USER_AUTH_SERVICE_LDAP+`' AND AuthData = :LoginId)`,
+ params); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetForLogin", "store.sql_user.get_for_login.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else if len(users) == 1 {
+ result.Data = users[0]
+ } else if len(users) > 1 {
+ result.Err = model.NewAppError("SqlUserStore.GetForLogin", "store.sql_user.get_for_login.multiple_users", nil, "", http.StatusInternalServerError)
+ } else {
+ result.Err = model.NewAppError("SqlUserStore.GetForLogin", "store.sql_user.get_for_login.app_error", nil, "", http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) VerifyEmail(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := us.GetMaster().Exec("UPDATE Users SET EmailVerified = true WHERE Id = :UserId", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.VerifyEmail", "store.sql_user.verify_email.app_error", nil, "userId="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = userId
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetTotalUsersCount() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if count, err := us.GetReplica().SelectInt("SELECT COUNT(Id) FROM Users"); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetTotalUsersCount", "store.sql_user.get_total_users_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) PermanentDelete(userId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if _, err := us.GetMaster().Exec("DELETE FROM Users WHERE Id = :UserId", map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.PermanentDelete", "store.sql_user.permanent_delete.app_error", nil, "userId="+userId+", "+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) AnalyticsUniqueUserCount(teamId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query := ""
+ if len(teamId) > 0 {
+ query = "SELECT COUNT(DISTINCT Users.Email) From Users, TeamMembers WHERE TeamMembers.TeamId = :TeamId AND Users.Id = TeamMembers.UserId AND TeamMembers.DeleteAt = 0 AND Users.DeleteAt = 0"
+ } else {
+ query = "SELECT COUNT(DISTINCT Email) FROM Users WHERE DeleteAt = 0"
+ }
+
+ v, err := us.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlUserStore.AnalyticsUniqueUserCount", "store.sql_user.analytics_unique_user_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) AnalyticsActiveCount(timePeriod int64) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ time := model.GetMillis() - timePeriod
+
+ query := "SELECT COUNT(*) FROM Status WHERE LastActivityAt > :Time"
+
+ v, err := us.GetReplica().SelectInt(query, map[string]interface{}{"Time": time})
+ if err != nil {
+ result.Err = model.NewAppError("SqlUserStore.AnalyticsDailyActiveUsers", "store.sql_user.analytics_daily_active_users.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetUnreadCount(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
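+		// Direct channels ('D') count every unread message (TotalMsgCount - MsgCount);
+		// all other channel types count mentions only.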
+ if count, err := us.GetReplica().SelectInt(`
+ SELECT SUM(CASE WHEN c.Type = 'D' THEN (c.TotalMsgCount - cm.MsgCount) ELSE cm.MentionCount END)
+ FROM Channels c
+ INNER JOIN ChannelMembers cm
+ ON cm.ChannelId = c.Id
+ AND cm.UserId = :UserId
+ AND c.DeleteAt = 0`, map[string]interface{}{"UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetMentionCount", "store.sql_user.get_unread_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetUnreadCountForChannel(userId string, channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if count, err := us.GetReplica().SelectInt("SELECT SUM(CASE WHEN c.Type = 'D' THEN (c.TotalMsgCount - cm.MsgCount) ELSE cm.MentionCount END) FROM Channels c INNER JOIN ChannelMembers cm ON c.Id = :ChannelId AND cm.ChannelId = :ChannelId AND cm.UserId = :UserId", map[string]interface{}{"ChannelId": channelId, "UserId": userId}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetMentionCountForChannel", "store.sql_user.get_unread_count_for_channel.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) Search(teamId string, term string, options map[string]bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := ""
+
+ if teamId == "" {
+
+ // Id != '' is added because both SEARCH_CLAUSE and INACTIVE_CLAUSE start with an AND
+ searchQuery = `
+ SELECT
+ *
+ FROM
+ Users
+ WHERE
+ Id != ''
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Username ASC
+ LIMIT 100`
+ } else {
+ searchQuery = `
+ SELECT
+ Users.*
+ FROM
+ Users, TeamMembers
+ WHERE
+ TeamMembers.TeamId = :TeamId
+ AND Users.Id = TeamMembers.UserId
+ AND TeamMembers.DeleteAt = 0
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Users.Username ASC
+ LIMIT 100`
+ }
+
+ storeChannel <- us.performSearch(searchQuery, term, options, map[string]interface{}{"TeamId": teamId})
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) SearchWithoutTeam(term string, options map[string]bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := `
+ SELECT
+ *
+ FROM
+ Users
+ WHERE
+ (SELECT
+ COUNT(0)
+ FROM
+ TeamMembers
+ WHERE
+ TeamMembers.UserId = Users.Id
+ AND TeamMembers.DeleteAt = 0) = 0
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Username ASC
+ LIMIT 100`
+
+ storeChannel <- us.performSearch(searchQuery, term, options, map[string]interface{}{})
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) SearchNotInTeam(notInTeamId string, term string, options map[string]bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := `
+ SELECT
+ Users.*
+ FROM Users
+ LEFT JOIN TeamMembers tm
+ ON tm.UserId = Users.Id
+ AND tm.TeamId = :NotInTeamId
+ WHERE
+ (tm.UserId IS NULL OR tm.DeleteAt != 0)
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Users.Username ASC
+ LIMIT 100`
+
+ storeChannel <- us.performSearch(searchQuery, term, options, map[string]interface{}{"NotInTeamId": notInTeamId})
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) SearchNotInChannel(teamId string, channelId string, term string, options map[string]bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := ""
+ if teamId == "" {
+ searchQuery = `
+ SELECT
+ Users.*
+ FROM Users
+ LEFT JOIN ChannelMembers cm
+ ON cm.UserId = Users.Id
+ AND cm.ChannelId = :ChannelId
+ WHERE
+ cm.UserId IS NULL
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Users.Username ASC
+ LIMIT 100`
+ } else {
+ searchQuery = `
+ SELECT
+ Users.*
+ FROM Users
+ INNER JOIN TeamMembers tm
+ ON tm.UserId = Users.Id
+ AND tm.TeamId = :TeamId
+ AND tm.DeleteAt = 0
+ LEFT JOIN ChannelMembers cm
+ ON cm.UserId = Users.Id
+ AND cm.ChannelId = :ChannelId
+ WHERE
+ cm.UserId IS NULL
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Users.Username ASC
+ LIMIT 100`
+ }
+
+ storeChannel <- us.performSearch(searchQuery, term, options, map[string]interface{}{"TeamId": teamId, "ChannelId": channelId})
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) SearchInChannel(channelId string, term string, options map[string]bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ searchQuery := `
+ SELECT
+ Users.*
+ FROM
+ Users, ChannelMembers
+ WHERE
+ ChannelMembers.ChannelId = :ChannelId
+ AND ChannelMembers.UserId = Users.Id
+ SEARCH_CLAUSE
+ INACTIVE_CLAUSE
+ ORDER BY Users.Username ASC
+ LIMIT 100`
+
+ storeChannel <- us.performSearch(searchQuery, term, options, map[string]interface{}{"ChannelId": channelId})
+ close(storeChannel)
+
+ }()
+
+ return storeChannel
+}
+
+var specialUserSearchChar = []string{
+ "<",
+ ">",
+ "+",
+ "-",
+ "(",
+ ")",
+ "~",
+ ":",
+ "*",
+ "\"",
+ "!",
+ "@",
+}
+
+var postgresSearchChar = []string{
+ "(",
+ ")",
+ ":",
+ "!",
+}
+
+func (us SqlUserStore) performSearch(searchQuery string, term string, options map[string]bool, parameters map[string]interface{}) store.StoreResult {
+ result := store.StoreResult{}
+
+ // Special handling for emails
+ originalTerm := term
+ postgresUseOriginalTerm := false
+ if strings.Contains(term, "@") && strings.Contains(term, ".") {
+ if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ postgresUseOriginalTerm = true
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
+ lastIndex := strings.LastIndex(term, ".")
+ term = term[0:lastIndex]
+ }
+ }
+
+ // these chars have special meaning and can be treated as spaces
+ for _, c := range specialUserSearchChar {
+ term = strings.Replace(term, c, " ", -1)
+ }
+
+ searchType := USER_SEARCH_TYPE_ALL
+ if ok := options[store.USER_SEARCH_OPTION_NAMES_ONLY]; ok {
+ searchType = USER_SEARCH_TYPE_NAMES
+ } else if ok = options[store.USER_SEARCH_OPTION_NAMES_ONLY_NO_FULL_NAME]; ok {
+ searchType = USER_SEARCH_TYPE_NAMES_NO_FULL_NAME
+ } else if ok = options[store.USER_SEARCH_OPTION_ALL_NO_FULL_NAME]; ok {
+ searchType = USER_SEARCH_TYPE_ALL_NO_FULL_NAME
+ }
+
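+	// Deactivated users (DeleteAt != 0) are filtered out unless the caller explicitly allows them.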
+ if ok := options[store.USER_SEARCH_OPTION_ALLOW_INACTIVE]; ok {
+ searchQuery = strings.Replace(searchQuery, "INACTIVE_CLAUSE", "", 1)
+ } else {
+ searchQuery = strings.Replace(searchQuery, "INACTIVE_CLAUSE", "AND Users.DeleteAt = 0", 1)
+ }
+
+ if term == "" {
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "", 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_POSTGRES {
+ if postgresUseOriginalTerm {
+ term = originalTerm
+ // these chars will break the query and must be removed
+ for _, c := range postgresSearchChar {
+ term = strings.Replace(term, c, "", -1)
+ }
+ } else {
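+			// Build a prefix-matching tsquery: each word gets a ":*" suffix and the words are ANDed with "&".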
+ splitTerm := strings.Fields(term)
+ for i, t := range strings.Fields(term) {
+ if i == len(splitTerm)-1 {
+ splitTerm[i] = t + ":*"
+ } else {
+ splitTerm[i] = t + ":* &"
+ }
+ }
+
+ term = strings.Join(splitTerm, " ")
+ }
+
+ searchType = convertMySQLFullTextColumnsToPostgres(searchType)
+ searchClause := fmt.Sprintf("AND (%s) @@ to_tsquery('simple', :Term)", searchType)
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+ } else if *utils.Cfg.SqlSettings.DriverName == model.DATABASE_DRIVER_MYSQL {
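+		// MySQL boolean mode: require every word ("+") and match each one as a prefix ("*").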
+ splitTerm := strings.Fields(term)
+ for i, t := range strings.Fields(term) {
+ splitTerm[i] = "+" + t + "*"
+ }
+
+ term = strings.Join(splitTerm, " ")
+
+ searchClause := fmt.Sprintf("AND MATCH(%s) AGAINST (:Term IN BOOLEAN MODE)", searchType)
+ searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", searchClause, 1)
+ }
+
+ var users []*model.User
+
+ parameters["Term"] = term
+
+ if _, err := us.GetReplica().Select(&users, searchQuery, parameters); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.Search", "store.sql_user.search.app_error", nil, "term="+term+", "+"search_type="+searchType+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ return result
+}
+
+func (us SqlUserStore) AnalyticsGetInactiveUsersCount() store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if count, err := us.GetReplica().SelectInt("SELECT COUNT(Id) FROM Users WHERE DeleteAt > 0"); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.AnalyticsGetInactiveUsersCount", "store.sql_user.analytics_get_inactive_users_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) AnalyticsGetSystemAdminCount() store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if count, err := us.GetReplica().SelectInt("SELECT count(*) FROM Users WHERE Roles LIKE :Roles and DeleteAt = 0", map[string]interface{}{"Roles": "%system_admin%"}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.AnalyticsGetSystemAdminCount", "store.sql_user.analytics_get_system_admin_count.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = count
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetProfilesNotInTeam(teamId string, offset int, limit int) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var users []*model.User
+
+ if _, err := us.GetReplica().Select(&users, `
+ SELECT
+ u.*
+ FROM Users u
+ LEFT JOIN TeamMembers tm
+ ON tm.UserId = u.Id
+ AND tm.TeamId = :TeamId
+ AND tm.DeleteAt = 0
+ WHERE tm.UserId IS NULL
+ ORDER BY u.Username ASC
+ LIMIT :Limit OFFSET :Offset
+ `, map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlUserStore.GetProfilesNotInTeam", "store.sql_user.get_profiles.app_error", nil, err.Error(), http.StatusInternalServerError)
+ } else {
+
+ for _, u := range users {
+ u.Sanitize(map[string]bool{})
+ }
+
+ result.Data = users
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (us SqlUserStore) GetEtagForProfilesNotInTeam(teamId string) store.StoreChannel {
+
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
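+ // derive the etag from the UpdateAt of the most recently updated user outside the
+ // team; if the query fails, fall back to a fresh timestamp so client caches are invalidated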
+ updateAt, err := us.GetReplica().SelectInt(`
+ SELECT
+ u.UpdateAt
+ FROM Users u
+ LEFT JOIN TeamMembers tm
+ ON tm.UserId = u.Id
+ AND tm.TeamId = :TeamId
+ AND tm.DeleteAt = 0
+ WHERE tm.UserId IS NULL
+ ORDER BY u.UpdateAt DESC
+ LIMIT 1
+ `, map[string]interface{}{"TeamId": teamId})
+
+ if err != nil {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, model.GetMillis(), utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ } else {
+ result.Data = fmt.Sprintf("%v.%v.%v.%v", model.CurrentVersion, updateAt, utils.Cfg.PrivacySettings.ShowFullName, utils.Cfg.PrivacySettings.ShowEmailAddress)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/user_store_test.go b/store/sqlstore/user_store_test.go
new file mode 100644
index 000000000..d46513ccc
--- /dev/null
+++ b/store/sqlstore/user_store_test.go
@@ -0,0 +1,2020 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+)
+
+func TestUserStoreSave(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+
+ if err := (<-ss.User().Save(&u1)).Err; err != nil {
+ t.Fatal("couldn't save user", err)
+ }
+
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ if err := (<-ss.User().Save(&u1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update user from save")
+ }
+
+ u1.Id = ""
+ if err := (<-ss.User().Save(&u1)).Err; err == nil {
+ t.Fatal("should be unique email")
+ }
+
+ u1.Email = ""
+ if err := (<-ss.User().Save(&u1)).Err; err == nil {
+ t.Fatal("should be unique username")
+ }
+
+ u1.Email = strings.Repeat("0123456789", 20)
+ u1.Username = ""
+ if err := (<-ss.User().Save(&u1)).Err; err == nil {
+ t.Fatal("should be unique username")
+ }
+
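+ // together with the member saved above, these 49 saves fill the team to its
+ // member limit, so the extra member added below must be rejected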
+ for i := 0; i < 49; i++ {
+ u1.Id = ""
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ if err := (<-ss.User().Save(&u1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+ }
+
+ u1.Id = ""
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ if err := (<-ss.User().Save(&u1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id})).Err; err == nil {
+ t.Fatal("should be the limit")
+ }
+
+}
+
+func TestUserStoreUpdate(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.AuthService = "ldap"
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u2.Id}))
+
+ time.Sleep(100 * time.Millisecond)
+
+ if err := (<-ss.User().Update(u1, false)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ u1.Id = "missing"
+ if err := (<-ss.User().Update(u1, false)).Err; err == nil {
+ t.Fatal("Update should have failed because of missing key")
+ }
+
+ u1.Id = model.NewId()
+ if err := (<-ss.User().Update(u1, false)).Err; err == nil {
+ t.Fatal("Update should have faile because id change")
+ }
+
+ u2.Email = model.NewId()
+ if err := (<-ss.User().Update(u2, false)).Err; err == nil {
+ t.Fatal("Update should have failed because you can't modify AD/LDAP fields")
+ }
+
+ u3 := &model.User{}
+ u3.Email = model.NewId()
+ oldEmail := u3.Email
+ u3.AuthService = "gitlab"
+ store.Must(ss.User().Save(u3))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u3.Id}))
+
+ u3.Email = model.NewId()
+ if result := <-ss.User().Update(u3, false); result.Err != nil {
+ t.Fatal("Update should not have failed")
+ } else {
+ newUser := result.Data.([2]*model.User)[0]
+ if newUser.Email != oldEmail {
+ t.Fatal("Email should not have been updated as the update is not trusted")
+ }
+ }
+
+ if result := <-ss.User().Update(u3, true); result.Err != nil {
+ t.Fatal("Update should not have failed")
+ } else {
+ newUser := result.Data.([2]*model.User)[0]
+ if newUser.Email != u3.Email {
+ t.Fatal("Email should have been updated as the update is trusted")
+ }
+ }
+
+ if result := <-ss.User().UpdateLastPictureUpdate(u1.Id); result.Err != nil {
+ t.Fatal("Update should not have failed")
+ }
+}
+
+func TestUserStoreUpdateUpdateAt(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ time.Sleep(10 * time.Millisecond)
+
+ if err := (<-ss.User().UpdateUpdateAt(u1.Id)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.User().Get(u1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.User).UpdateAt <= u1.UpdateAt {
+ t.Fatal("UpdateAt not updated correctly")
+ }
+ }
+
+}
+
+func TestUserStoreUpdateFailedPasswordAttempts(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ if err := (<-ss.User().UpdateFailedPasswordAttempts(u1.Id, 3)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.User().Get(u1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.User).FailedAttempts != 3 {
+ t.Fatal("FailedAttempts not updated correctly")
+ }
+ }
+
+}
+
+func TestUserStoreGet(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ if r1 := <-ss.User().Get(u1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.User).ToJson() != u1.ToJson() {
+ t.Fatal("invalid returned user")
+ }
+ }
+
+ if err := (<-ss.User().Get("")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestUserCount(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ if result := <-ss.User().GetTotalUsersCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ count := result.Data.(int64)
+ if count <= 0 {
+ t.Fatal()
+ }
+ }
+}
+
+func TestGetAllUsingAuthService(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.AuthService = "someservice"
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.AuthService = "someservice"
+ store.Must(ss.User().Save(u2))
+
+ if r1 := <-ss.User().GetAllUsingAuthService(u1.AuthService); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) < 2 {
+ t.Fatal("invalid returned users")
+ }
+ }
+}
+
+func TestUserStoreGetAllProfiles(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+
+ if r1 := <-ss.User().GetAllProfiles(0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) < 2 {
+ t.Fatal("invalid returned users")
+ }
+ }
+
+ if r2 := <-ss.User().GetAllProfiles(0, 1); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ users := r2.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users, limit did not work")
+ }
+ }
+
+ if r2 := <-ss.User().GetAll(); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ users := r2.Data.([]*model.User)
+ if len(users) < 2 {
+ t.Fatal("invalid returned users")
+ }
+ }
+
+ etag := ""
+ if r2 := <-ss.User().GetEtagForAllProfiles(); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ etag = r2.Data.(string)
+ }
+
+ u3 := &model.User{}
+ u3.Email = model.NewId()
+ store.Must(ss.User().Save(u3))
+
+ if r2 := <-ss.User().GetEtagForAllProfiles(); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if etag == r2.Data.(string) {
+ t.Fatal("etags should not match")
+ }
+ }
+}
+
+func TestUserStoreGetProfiles(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ if r1 := <-ss.User().GetProfiles(teamId, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfiles("123", 0, 100); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.([]*model.User)) != 0 {
+ t.Fatal("should have returned empty map")
+ }
+ }
+
+ etag := ""
+ if r2 := <-ss.User().GetEtagForProfiles(teamId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ etag = r2.Data.(string)
+ }
+
+ u3 := &model.User{}
+ u3.Email = model.NewId()
+ store.Must(ss.User().Save(u3))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u3.Id}))
+
+ if r2 := <-ss.User().GetEtagForProfiles(teamId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if etag == r2.Data.(string) {
+ t.Fatal("etags should not match")
+ }
+ }
+}
+
+func TestUserStoreGetProfilesInChannel(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ c1 := model.Channel{}
+ c1.TeamId = teamId
+ c1.DisplayName = "Profiles in channel"
+ c1.Name = "profiles-" + model.NewId()
+ c1.Type = model.CHANNEL_OPEN
+
+ c2 := model.Channel{}
+ c2.TeamId = teamId
+ c2.DisplayName = "Profiles in private"
+ c2.Name = "profiles-" + model.NewId()
+ c2.Type = model.CHANNEL_PRIVATE
+
+ store.Must(ss.Channel().Save(&c1))
+ store.Must(ss.Channel().Save(&c2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = c1.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = c1.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = c2.Id
+ m3.UserId = u1.Id
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ store.Must(ss.Channel().SaveMember(&m1))
+ store.Must(ss.Channel().SaveMember(&m2))
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ if r1 := <-ss.User().GetProfilesInChannel(c1.Id, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfilesInChannel(c2.Id, 0, 1); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.([]*model.User)) != 1 {
+ t.Fatal("should have returned only 1 user")
+ }
+ }
+}
+
+func TestUserStoreGetProfilesWithoutTeam(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ // These usernames need to appear in the first 100 users for this to work
+
+ u1 := &model.User{}
+ u1.Username = "a000000000" + model.NewId()
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+ defer ss.User().PermanentDelete(u1.Id)
+
+ u2 := &model.User{}
+ u2.Username = "a000000001" + model.NewId()
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ defer ss.User().PermanentDelete(u2.Id)
+
+ if r1 := <-ss.User().GetProfilesWithoutTeam(0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+
+ found1 := false
+ found2 := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found1 = true
+ } else if u.Id == u2.Id {
+ found2 = true
+ }
+ }
+
+ if found1 {
+ t.Fatal("shouldn't have returned user on team")
+ } else if !found2 {
+ t.Fatal("should've returned user without any teams")
+ }
+ }
+}
+
+func TestUserStoreGetAllProfilesInChannel(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ c1 := model.Channel{}
+ c1.TeamId = teamId
+ c1.DisplayName = "Profiles in channel"
+ c1.Name = "profiles-" + model.NewId()
+ c1.Type = model.CHANNEL_OPEN
+
+ c2 := model.Channel{}
+ c2.TeamId = teamId
+ c2.DisplayName = "Profiles in private"
+ c2.Name = "profiles-" + model.NewId()
+ c2.Type = model.CHANNEL_PRIVATE
+
+ store.Must(ss.Channel().Save(&c1))
+ store.Must(ss.Channel().Save(&c2))
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = c1.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = c1.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = c2.Id
+ m3.UserId = u1.Id
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ store.Must(ss.Channel().SaveMember(&m1))
+ store.Must(ss.Channel().SaveMember(&m2))
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ if r1 := <-ss.User().GetAllProfilesInChannel(c1.Id, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.(map[string]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ if users[u1.Id].Id != u1.Id {
+ t.Fatal("invalid returned user")
+ }
+ }
+
+ if r2 := <-ss.User().GetAllProfilesInChannel(c2.Id, false); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.(map[string]*model.User)) != 1 {
+ t.Fatal("should have returned empty map")
+ }
+ }
+
+ if r2 := <-ss.User().GetAllProfilesInChannel(c2.Id, true); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.(map[string]*model.User)) != 1 {
+ t.Fatal("should have returned empty map")
+ }
+ }
+
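+ // a second call with allowFromCache=true should now be served from the profiles cache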
+ if r2 := <-ss.User().GetAllProfilesInChannel(c2.Id, true); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.(map[string]*model.User)) != 1 {
+ t.Fatal("should have returned empty map")
+ }
+ }
+
+ ss.User().InvalidateProfilesInChannelCacheByUser(u1.Id)
+ ss.User().InvalidateProfilesInChannelCache(c2.Id)
+}
+
+func TestUserStoreGetProfilesNotInChannel(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ c1 := model.Channel{}
+ c1.TeamId = teamId
+ c1.DisplayName = "Profiles in channel"
+ c1.Name = "profiles-" + model.NewId()
+ c1.Type = model.CHANNEL_OPEN
+
+ c2 := model.Channel{}
+ c2.TeamId = teamId
+ c2.DisplayName = "Profiles in private"
+ c2.Name = "profiles-" + model.NewId()
+ c2.Type = model.CHANNEL_PRIVATE
+
+ store.Must(ss.Channel().Save(&c1))
+ store.Must(ss.Channel().Save(&c2))
+
+ if r1 := <-ss.User().GetProfilesNotInChannel(teamId, c1.Id, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfilesNotInChannel(teamId, c2.Id, 0, 100); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.([]*model.User)) != 2 {
+ t.Fatal("invalid returned users")
+ }
+ }
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = c1.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = c1.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m3 := model.ChannelMember{}
+ m3.ChannelId = c2.Id
+ m3.UserId = u1.Id
+ m3.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ store.Must(ss.Channel().SaveMember(&m1))
+ store.Must(ss.Channel().SaveMember(&m2))
+ store.Must(ss.Channel().SaveMember(&m3))
+
+ if r1 := <-ss.User().GetProfilesNotInChannel(teamId, c1.Id, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 0 {
+ t.Fatal("invalid returned users")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfilesNotInChannel(teamId, c2.Id, 0, 100); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.([]*model.User)) != 1 {
+ t.Fatal("should have had 1 user not in channel")
+ }
+ }
+}
+
+func TestUserStoreGetProfilesByIds(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id}, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id}, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id, u2.Id}, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id, u2.Id}, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id, u2.Id}, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfileByIds([]string{u1.Id}, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u1.Id {
+ found = true
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfiles("123", 0, 100); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ if len(r2.Data.([]*model.User)) != 0 {
+ t.Fatal("should have returned empty array")
+ }
+ }
+}
+
+func TestUserStoreGetProfilesByUsernames(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Username = "username1" + model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Username = "username2" + model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ if r1 := <-ss.User().GetProfilesByUsernames([]string{u1.Username, u2.Username}, teamId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ if users[0].Id != u1.Id && users[1].Id != u1.Id {
+ t.Fatal("invalid returned user 1")
+ }
+
+ if users[0].Id != u2.Id && users[1].Id != u2.Id {
+ t.Fatal("invalid returned user 2")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfilesByUsernames([]string{u1.Username}, teamId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users")
+ }
+
+ if users[0].Id != u1.Id {
+ t.Fatal("invalid returned user")
+ }
+ }
+
+ team2Id := model.NewId()
+
+ u3 := &model.User{}
+ u3.Email = model.NewId()
+ u3.Username = "username3" + model.NewId()
+ store.Must(ss.User().Save(u3))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: team2Id, UserId: u3.Id}))
+
+ if r1 := <-ss.User().GetProfilesByUsernames([]string{u1.Username, u3.Username}, ""); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 2 {
+ t.Fatal("invalid returned users")
+ }
+
+ if users[0].Id != u1.Id && users[1].Id != u1.Id {
+ t.Fatal("invalid returned user 1")
+ }
+
+ if users[0].Id != u3.Id && users[1].Id != u3.Id {
+ t.Fatal("invalid returned user 3")
+ }
+ }
+
+ if r1 := <-ss.User().GetProfilesByUsernames([]string{u1.Username, u3.Username}, teamId); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ if len(users) != 1 {
+ t.Fatal("invalid returned users")
+ }
+
+ if users[0].Id != u1.Id {
+ t.Fatal("invalid returned user")
+ }
+ }
+}
+
+func TestUserStoreGetSystemAdminProfiles(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Roles = model.ROLE_SYSTEM_USER.Id + " " + model.ROLE_SYSTEM_ADMIN.Id
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ if r1 := <-ss.User().GetSystemAdminProfiles(); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.(map[string]*model.User)
+ if len(users) <= 0 {
+ t.Fatal("invalid returned system admin users")
+ }
+ }
+}
+
+func TestUserStoreGetByEmail(t *testing.T) {
+ ss := Setup()
+
+ teamid := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamid, UserId: u1.Id}))
+
+ if err := (<-ss.User().GetByEmail(u1.Email)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.User().GetByEmail("")).Err; err == nil {
+ t.Fatal("Should have failed because of missing email")
+ }
+}
+
+func TestUserStoreGetByAuthData(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ auth := "123" + model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.AuthData = &auth
+ u1.AuthService = "service"
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ if err := (<-ss.User().GetByAuth(u1.AuthData, u1.AuthService)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ rauth := ""
+ if err := (<-ss.User().GetByAuth(&rauth, "")).Err; err == nil {
+ t.Fatal("Should have failed because of missing auth data")
+ }
+}
+
+func TestUserStoreGetByUsername(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ if err := (<-ss.User().GetByUsername(u1.Username)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.User().GetByUsername("")).Err; err == nil {
+ t.Fatal("Should have failed because of missing username")
+ }
+}
+
+func TestUserStoreGetForLogin(t *testing.T) {
+ ss := Setup()
+
+ auth := model.NewId()
+
+ u1 := &model.User{
+ Email: model.NewId(),
+ Username: model.NewId(),
+ AuthService: model.USER_AUTH_SERVICE_GITLAB,
+ AuthData: &auth,
+ }
+ store.Must(ss.User().Save(u1))
+
+ auth2 := model.NewId()
+
+ u2 := &model.User{
+ Email: model.NewId(),
+ Username: model.NewId(),
+ AuthService: model.USER_AUTH_SERVICE_LDAP,
+ AuthData: &auth2,
+ }
+ store.Must(ss.User().Save(u2))
+
+ if result := <-ss.User().GetForLogin(u1.Username, true, true, true); result.Err != nil {
+ t.Fatal("Should have gotten user by username", result.Err)
+ } else if result.Data.(*model.User).Id != u1.Id {
+ t.Fatal("Should have gotten user1 by username")
+ }
+
+ if result := <-ss.User().GetForLogin(u1.Email, true, true, true); result.Err != nil {
+ t.Fatal("Should have gotten user by email", result.Err)
+ } else if result.Data.(*model.User).Id != u1.Id {
+ t.Fatal("Should have gotten user1 by email")
+ }
+
+ if result := <-ss.User().GetForLogin(*u2.AuthData, true, true, true); result.Err != nil {
+ t.Fatal("Should have gotten user by AD/LDAP AuthData", result.Err)
+ } else if result.Data.(*model.User).Id != u2.Id {
+ t.Fatal("Should have gotten user2 by AD/LDAP AuthData")
+ }
+
+ // prevent getting user by AuthData when they're not an LDAP user
+ if result := <-ss.User().GetForLogin(*u1.AuthData, true, true, true); result.Err == nil {
+ t.Fatal("Should not have gotten user by non-AD/LDAP AuthData")
+ }
+
+ // prevent getting user when different login methods are disabled
+ if result := <-ss.User().GetForLogin(u1.Username, false, true, true); result.Err == nil {
+ t.Fatal("Should have failed to get user1 by username")
+ }
+
+ if result := <-ss.User().GetForLogin(u1.Email, true, false, true); result.Err == nil {
+ t.Fatal("Should have failed to get user1 by email")
+ }
+
+ if result := <-ss.User().GetForLogin(*u2.AuthData, true, true, false); result.Err == nil {
+ t.Fatal("Should have failed to get user3 by AD/LDAP AuthData")
+ }
+
+ auth3 := model.NewId()
+
+ // test a special case where two users will have conflicting login information so we throw a special error
+ u3 := &model.User{
+ Email: model.NewId(),
+ Username: model.NewId(),
+ AuthService: model.USER_AUTH_SERVICE_LDAP,
+ AuthData: &auth3,
+ }
+ store.Must(ss.User().Save(u3))
+
+ u4 := &model.User{
+ Email: model.NewId(),
+ Username: model.NewId(),
+ AuthService: model.USER_AUTH_SERVICE_LDAP,
+ AuthData: &u3.Username,
+ }
+ store.Must(ss.User().Save(u4))
+
+ if err := (<-ss.User().GetForLogin(u3.Username, true, true, true)).Err; err == nil {
+ t.Fatal("Should have failed to get users with conflicting login information")
+ }
+}
+
+func TestUserStoreUpdatePassword(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ hashedPassword := model.HashPassword("newpwd")
+
+ if err := (<-ss.User().UpdatePassword(u1.Id, hashedPassword)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.User().GetByEmail(u1.Email); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ user := r1.Data.(*model.User)
+ if user.Password != hashedPassword {
+ t.Fatal("Password was not updated correctly")
+ }
+ }
+}
+
+func TestUserStoreDelete(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: model.NewId(), UserId: u1.Id}))
+
+ if err := (<-ss.User().PermanentDelete(u1.Id)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestUserStoreUpdateAuthData(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ service := "someservice"
+ authData := model.NewId()
+
+ if err := (<-ss.User().UpdateAuthData(u1.Id, service, &authData, "", true)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if r1 := <-ss.User().GetByEmail(u1.Email); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ user := r1.Data.(*model.User)
+ if user.AuthService != service {
+ t.Fatal("AuthService was not updated correctly")
+ }
+ if *user.AuthData != authData {
+ t.Fatal("AuthData was not updated correctly")
+ }
+ if user.Password != "" {
+ t.Fatal("Password was not cleared properly")
+ }
+ }
+}
+
+func TestUserUnreadCount(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ c1 := model.Channel{}
+ c1.TeamId = teamId
+ c1.DisplayName = "Unread Messages"
+ c1.Name = "unread-messages-" + model.NewId()
+ c1.Type = model.CHANNEL_OPEN
+
+ c2 := model.Channel{}
+ c2.TeamId = teamId
+ c2.DisplayName = "Unread Direct"
+ c2.Name = "unread-direct-" + model.NewId()
+ c2.Type = model.CHANNEL_DIRECT
+
+ u1 := &model.User{}
+ u1.Username = "user1" + model.NewId()
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.Username = "user2" + model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+
+ if err := (<-ss.Channel().Save(&c1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ m1 := model.ChannelMember{}
+ m1.ChannelId = c1.Id
+ m1.UserId = u1.Id
+ m1.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ m2 := model.ChannelMember{}
+ m2.ChannelId = c1.Id
+ m2.UserId = u2.Id
+ m2.NotifyProps = model.GetDefaultChannelNotifyProps()
+
+ store.Must(ss.Channel().SaveMember(&m1))
+ store.Must(ss.Channel().SaveMember(&m2))
+
+ m1.ChannelId = c2.Id
+ m2.ChannelId = c2.Id
+
+ if err := (<-ss.Channel().SaveDirectChannel(&c2, &m1, &m2)).Err; err != nil {
+ t.Fatal("couldn't save direct channel", err)
+ }
+
+ p1 := model.Post{}
+ p1.ChannelId = c1.Id
+ p1.UserId = u1.Id
+ p1.Message = "this is a message for @" + u2.Username
+
+ // Post one message with mention to open channel
+ store.Must(ss.Post().Save(&p1))
+ store.Must(ss.Channel().IncrementMentionCount(c1.Id, u2.Id))
+
+ // Post 2 messages without mention to direct channel
+ p2 := model.Post{}
+ p2.ChannelId = c2.Id
+ p2.UserId = u1.Id
+ p2.Message = "first message"
+ store.Must(ss.Post().Save(&p2))
+ store.Must(ss.Channel().IncrementMentionCount(c2.Id, u2.Id))
+
+ p3 := model.Post{}
+ p3.ChannelId = c2.Id
+ p3.UserId = u1.Id
+ p3.Message = "second message"
+ store.Must(ss.Post().Save(&p3))
+ store.Must(ss.Channel().IncrementMentionCount(c2.Id, u2.Id))
+
+ badge := (<-ss.User().GetUnreadCount(u2.Id)).Data.(int64)
+ if badge != 3 {
+ t.Fatal("should have 3 unread messages")
+ }
+
+ badge = (<-ss.User().GetUnreadCountForChannel(u2.Id, c1.Id)).Data.(int64)
+ if badge != 1 {
+ t.Fatal("should have 1 unread messages for that channel")
+ }
+
+ badge = (<-ss.User().GetUnreadCountForChannel(u2.Id, c2.Id)).Data.(int64)
+ if badge != 2 {
+ t.Fatal("should have 2 unread messages for that channel")
+ }
+}
+
+func TestUserStoreUpdateMfaSecret(t *testing.T) {
+ ss := Setup()
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(&u1))
+
+ time.Sleep(100 * time.Millisecond)
+
+ if err := (<-ss.User().UpdateMfaSecret(u1.Id, "12345")).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ // should pass, no update will occur though
+ if err := (<-ss.User().UpdateMfaSecret("junk", "12345")).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestUserStoreUpdateMfaActive(t *testing.T) {
+ ss := Setup()
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(&u1))
+
+ time.Sleep(100 * time.Millisecond)
+
+ if err := (<-ss.User().UpdateMfaActive(u1.Id, true)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ if err := (<-ss.User().UpdateMfaActive(u1.Id, false)).Err; err != nil {
+ t.Fatal(err)
+ }
+
+ // should pass, no update will occur though
+ if err := (<-ss.User().UpdateMfaActive("junk", true)).Err; err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestUserStoreGetRecentlyActiveUsersForTeam(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Status().SaveOrUpdate(&model.Status{UserId: u1.Id, Status: model.STATUS_ONLINE, Manual: false, LastActivityAt: model.GetMillis(), ActiveChannel: ""}))
+ tid := model.NewId()
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u1.Id}))
+
+ if r1 := <-ss.User().GetRecentlyActiveUsersForTeam(tid, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+}
+
+func TestUserStoreGetNewUsersForTeam(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Status().SaveOrUpdate(&model.Status{UserId: u1.Id, Status: model.STATUS_ONLINE, Manual: false, LastActivityAt: model.GetMillis(), ActiveChannel: ""}))
+ tid := model.NewId()
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u1.Id}))
+
+ if r1 := <-ss.User().GetNewUsersForTeam(tid, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+}
+
+func TestUserStoreSearch(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Username = "jimbo" + model.NewId()
+ u1.FirstName = "Tim"
+ u1.LastName = "Bill"
+ u1.Nickname = "Rob"
+ u1.Email = "harold" + model.NewId() + "@simulator.amazonses.com"
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Username = "jim-bobby" + model.NewId()
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+
+ u3 := &model.User{}
+ u3.Username = "jimbo" + model.NewId()
+ u3.Email = model.NewId()
+ u3.DeleteAt = 1
+ store.Must(ss.User().Save(u3))
+
+ tid := model.NewId()
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u1.Id}))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u2.Id}))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u3.Id}))
+
+ searchOptions := map[string]bool{}
+ searchOptions[store.USER_SEARCH_OPTION_NAMES_ONLY] = true
+
+ if r1 := <-ss.User().Search(tid, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ found2 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+
+ if profile.Id == u3.Id {
+ found2 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user")
+ }
+
+ if found2 {
+ t.Fatal("should not have found inactive user")
+ }
+ }
+
+ searchOptions[store.USER_SEARCH_OPTION_NAMES_ONLY] = false
+
+ if r1 := <-ss.User().Search(tid, u1.Email, searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user")
+ }
+ }
+
+ searchOptions[store.USER_SEARCH_OPTION_NAMES_ONLY] = true
+
+ // * should be treated as a space
+ if r1 := <-ss.User().Search(tid, "jimb*", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ found2 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+
+ if profile.Id == u3.Id {
+ found2 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user")
+ }
+
+ if found2 {
+ t.Fatal("should not have found inactive user")
+ }
+ }
+
+ if r1 := <-ss.User().Search(tid, "harol", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+ }
+
+ if found1 {
+ t.Fatal("should not have found user")
+ }
+ }
+
+ searchOptions[store.USER_SEARCH_OPTION_ALLOW_INACTIVE] = true
+
+ if r1 := <-ss.User().Search(tid, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ found2 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+
+ if profile.Id == u3.Id {
+ found2 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user")
+ }
+
+ if !found2 {
+ t.Fatal("should have found inactive user")
+ }
+ }
+
+ searchOptions[store.USER_SEARCH_OPTION_ALLOW_INACTIVE] = false
+
+ if r1 := <-ss.User().Search(tid, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search("", "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search("", "jim-bobb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ t.Log(profile.Username)
+ if profile.Id == u2.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search(tid, "", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ c1 := model.Channel{}
+ c1.TeamId = tid
+ c1.DisplayName = "NameName"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = *store.Must(ss.Channel().Save(&c1)).(*model.Channel)
+
+ if r1 := <-ss.User().SearchNotInChannel(tid, c1.Id, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().SearchNotInChannel("", c1.Id, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().SearchNotInChannel("junk", c1.Id, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if found {
+ t.Fatal("should not have found user")
+ }
+ }
+
+ if r1 := <-ss.User().SearchInChannel(c1.Id, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if found {
+ t.Fatal("should not have found user")
+ }
+ }
+
+ store.Must(ss.Channel().SaveMember(&model.ChannelMember{ChannelId: c1.Id, UserId: u1.Id, NotifyProps: model.GetDefaultChannelNotifyProps()}))
+
+ if r1 := <-ss.User().SearchInChannel(c1.Id, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ searchOptions = map[string]bool{}
+
+ if r1 := <-ss.User().Search(tid, "harol", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found1 := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search(tid, "Tim", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search(tid, "Bill", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().Search(tid, "Rob", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ // Search Users not in Team.
+ u4 := &model.User{}
+ u4.Username = "simon" + model.NewId()
+ u4.Email = model.NewId()
+ u4.DeleteAt = 0
+ store.Must(ss.User().Save(u4))
+
+ if r1 := <-ss.User().SearchNotInTeam(tid, "simo", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u4.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+
+ if r1 := <-ss.User().SearchNotInTeam(tid, "jimb", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found = true
+ break
+ }
+ }
+
+ if found {
+ t.Fatal("should not have found user")
+ }
+ }
+
+ // Check SearchNotInTeam finds previously deleted team members.
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u4.Id}))
+
+ if r1 := <-ss.User().SearchNotInTeam(tid, "simo", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u4.Id {
+ found = true
+ break
+ }
+ }
+
+ if found {
+ t.Fatal("should not have found user")
+ }
+ }
+
+ store.Must(ss.Team().UpdateMember(&model.TeamMember{TeamId: tid, UserId: u4.Id, DeleteAt: model.GetMillis() - 1000}))
+ if r1 := <-ss.User().SearchNotInTeam(tid, "simo", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+ found := false
+ for _, profile := range profiles {
+ if profile.Id == u4.Id {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ t.Fatal("should have found user")
+ }
+ }
+}
+
+func TestUserStoreSearchWithoutTeam(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Username = "jimbo" + model.NewId()
+ u1.FirstName = "Tim"
+ u1.LastName = "Bill"
+ u1.Nickname = "Rob"
+ u1.Email = "harold" + model.NewId() + "@simulator.amazonses.com"
+ store.Must(ss.User().Save(u1))
+
+ u2 := &model.User{}
+ u2.Username = "jim-bobby" + model.NewId()
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+
+ u3 := &model.User{}
+ u3.Username = "jimbo" + model.NewId()
+ u3.Email = model.NewId()
+ u3.DeleteAt = 1
+ store.Must(ss.User().Save(u3))
+
+ tid := model.NewId()
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: tid, UserId: u3.Id}))
+
+ searchOptions := map[string]bool{}
+ searchOptions[store.USER_SEARCH_OPTION_NAMES_ONLY] = true
+
+ if r1 := <-ss.User().SearchWithoutTeam("", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ }
+
+ if r1 := <-ss.User().SearchWithoutTeam("jim", searchOptions); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ profiles := r1.Data.([]*model.User)
+
+ found1 := false
+ found2 := false
+ found3 := false
+
+ for _, profile := range profiles {
+ if profile.Id == u1.Id {
+ found1 = true
+ } else if profile.Id == u2.Id {
+ found2 = true
+ } else if profile.Id == u3.Id {
+ found3 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("should have found user1")
+ } else if !found2 {
+ t.Fatal("should have found user2")
+ } else if found3 {
+ t.Fatal("should not have found user3")
+ }
+ }
+}
+
+func TestUserStoreAnalyticsGetInactiveUsersCount(t *testing.T) {
+ ss := Setup()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+
+ var count int64
+
+ if result := <-ss.User().AnalyticsGetInactiveUsersCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ count = result.Data.(int64)
+ }
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ u2.DeleteAt = model.GetMillis()
+ store.Must(ss.User().Save(u2))
+
+ if result := <-ss.User().AnalyticsGetInactiveUsersCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ newCount := result.Data.(int64)
+ if count != newCount-1 {
+ t.Fatal("Expected 1 more inactive users but found otherwise.", count, newCount)
+ }
+ }
+}
+
+func TestUserStoreAnalyticsGetSystemAdminCount(t *testing.T) {
+ ss := Setup()
+
+ var countBefore int64
+ if result := <-ss.User().AnalyticsGetSystemAdminCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ countBefore = result.Data.(int64)
+ }
+
+ u1 := model.User{}
+ u1.Email = model.NewId()
+ u1.Username = model.NewId()
+ u1.Roles = "system_user system_admin"
+
+ u2 := model.User{}
+ u2.Email = model.NewId()
+ u2.Username = model.NewId()
+
+ if err := (<-ss.User().Save(&u1)).Err; err != nil {
+ t.Fatal("couldn't save user", err)
+ }
+
+ if err := (<-ss.User().Save(&u2)).Err; err != nil {
+ t.Fatal("couldn't save user", err)
+ }
+
+ if result := <-ss.User().AnalyticsGetSystemAdminCount(); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ // We expect to find 1 more system admin than there was at the start of this test function.
+ if count := result.Data.(int64); count != countBefore+1 {
+ t.Fatal("Did not get the expected number of system admins. Expected, got: ", countBefore+1, count)
+ }
+ }
+}
+
+func TestUserStoreGetProfilesNotInTeam(t *testing.T) {
+ ss := Setup()
+
+ teamId := model.NewId()
+
+ u1 := &model.User{}
+ u1.Email = model.NewId()
+ store.Must(ss.User().Save(u1))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u1.Id}))
+ store.Must(ss.User().UpdateUpdateAt(u1.Id))
+
+ u2 := &model.User{}
+ u2.Email = model.NewId()
+ store.Must(ss.User().Save(u2))
+ store.Must(ss.User().UpdateUpdateAt(u2.Id))
+
+ var initialUsersNotInTeam int
+ var etag1, etag2, etag3 string
+
+ if er1 := <-ss.User().GetEtagForProfilesNotInTeam(teamId); er1.Err != nil {
+ t.Fatal(er1.Err)
+ } else {
+ etag1 = er1.Data.(string)
+ }
+
+ if r1 := <-ss.User().GetProfilesNotInTeam(teamId, 0, 100000); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ users := r1.Data.([]*model.User)
+ initialUsersNotInTeam = len(users)
+ if initialUsersNotInTeam < 1 {
+ t.Fatalf("Should be at least 1 user not in the team")
+ }
+
+ found := false
+ for _, u := range users {
+ if u.Id == u2.Id {
+ found = true
+ }
+ if u.Id == u1.Id {
+ t.Fatalf("Should not have found user1")
+ }
+ }
+
+ if !found {
+ t.Fatal("missing user2")
+ }
+ }
+
+ time.Sleep(time.Millisecond * 10)
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u2.Id}))
+ store.Must(ss.User().UpdateUpdateAt(u2.Id))
+
+ if er2 := <-ss.User().GetEtagForProfilesNotInTeam(teamId); er2.Err != nil {
+ t.Fatal(er2.Err)
+ } else {
+ etag2 = er2.Data.(string)
+ if etag1 == etag2 {
+ t.Fatalf("etag should have changed")
+ }
+ }
+
+ if r2 := <-ss.User().GetProfilesNotInTeam(teamId, 0, 100000); r2.Err != nil {
+ t.Fatal(r2.Err)
+ } else {
+ users := r2.Data.([]*model.User)
+
+ if len(users) != initialUsersNotInTeam-1 {
+ t.Fatalf("Should be one less user not in team")
+ }
+
+ for _, u := range users {
+ if u.Id == u2.Id {
+ t.Fatalf("Should not have found user2")
+ }
+ if u.Id == u1.Id {
+ t.Fatalf("Should not have found user1")
+ }
+ }
+ }
+
+ time.Sleep(time.Millisecond * 10)
+ store.Must(ss.Team().RemoveMember(teamId, u1.Id))
+ store.Must(ss.Team().RemoveMember(teamId, u2.Id))
+ store.Must(ss.User().UpdateUpdateAt(u1.Id))
+ store.Must(ss.User().UpdateUpdateAt(u2.Id))
+
+ if er3 := <-ss.User().GetEtagForProfilesNotInTeam(teamId); er3.Err != nil {
+ t.Fatal(er3.Err)
+ } else {
+ etag3 = er3.Data.(string)
+ t.Log(etag3)
+ if etag1 == etag3 || etag3 == etag2 {
+ t.Fatalf("etag should have changed")
+ }
+ }
+
+ if r3 := <-ss.User().GetProfilesNotInTeam(teamId, 0, 100000); r3.Err != nil {
+ t.Fatal(r3.Err)
+ } else {
+ users := r3.Data.([]*model.User)
+ found1, found2 := false, false
+ for _, u := range users {
+ if u.Id == u2.Id {
+ found2 = true
+ }
+ if u.Id == u1.Id {
+ found1 = true
+ }
+ }
+
+ if !found1 || !found2 {
+ t.Fatal("missing user1 or user2")
+ }
+ }
+
+ time.Sleep(time.Millisecond * 10)
+ u3 := &model.User{}
+ u3.Email = model.NewId()
+ store.Must(ss.User().Save(u3))
+ store.Must(ss.Team().SaveMember(&model.TeamMember{TeamId: teamId, UserId: u3.Id}))
+ store.Must(ss.User().UpdateUpdateAt(u3.Id))
+
+ if er4 := <-ss.User().GetEtagForProfilesNotInTeam(teamId); er4.Err != nil {
+ t.Fatal(er4.Err)
+ } else {
+ etag4 := er4.Data.(string)
+ t.Log(etag4)
+ if etag4 != etag3 {
+ t.Fatalf("etag should be the same")
+ }
+ }
+}
diff --git a/store/sqlstore/webhook_store.go b/store/sqlstore/webhook_store.go
new file mode 100644
index 000000000..705cd40bc
--- /dev/null
+++ b/store/sqlstore/webhook_store.go
@@ -0,0 +1,573 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "net/http"
+
+ "database/sql"
+
+ "github.com/mattermost/mattermost-server/einterfaces"
+ "github.com/mattermost/mattermost-server/model"
+ "github.com/mattermost/mattermost-server/store"
+ "github.com/mattermost/mattermost-server/utils"
+)
+
+type SqlWebhookStore struct {
+ SqlStore
+ metrics einterfaces.MetricsInterface
+}
+
+const (
+ WEBHOOK_CACHE_SIZE = 25000
+ WEBHOOK_CACHE_SEC = 900 // 15 minutes
+)
+
+var webhookCache = utils.NewLru(WEBHOOK_CACHE_SIZE)
+
+func ClearWebhookCaches() {
+ webhookCache.Purge()
+}
+
+func NewSqlWebhookStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.WebhookStore {
+ s := &SqlWebhookStore{
+ SqlStore: sqlStore,
+ metrics: metrics,
+ }
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.IncomingWebhook{}, "IncomingWebhooks").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("UserId").SetMaxSize(26)
+ table.ColMap("ChannelId").SetMaxSize(26)
+ table.ColMap("TeamId").SetMaxSize(26)
+ table.ColMap("DisplayName").SetMaxSize(64)
+ table.ColMap("Description").SetMaxSize(128)
+
+ tableo := db.AddTableWithName(model.OutgoingWebhook{}, "OutgoingWebhooks").SetKeys(false, "Id")
+ tableo.ColMap("Id").SetMaxSize(26)
+ tableo.ColMap("Token").SetMaxSize(26)
+ tableo.ColMap("CreatorId").SetMaxSize(26)
+ tableo.ColMap("ChannelId").SetMaxSize(26)
+ tableo.ColMap("TeamId").SetMaxSize(26)
+ tableo.ColMap("TriggerWords").SetMaxSize(1024)
+ tableo.ColMap("CallbackURLs").SetMaxSize(1024)
+ tableo.ColMap("DisplayName").SetMaxSize(64)
+ tableo.ColMap("Description").SetMaxSize(128)
+ tableo.ColMap("ContentType").SetMaxSize(128)
+ tableo.ColMap("TriggerWhen").SetMaxSize(1)
+ }
+
+ return s
+}
+
+func (s SqlWebhookStore) CreateIndexesIfNotExists() {
+ s.CreateIndexIfNotExists("idx_incoming_webhook_user_id", "IncomingWebhooks", "UserId")
+ s.CreateIndexIfNotExists("idx_incoming_webhook_team_id", "IncomingWebhooks", "TeamId")
+ s.CreateIndexIfNotExists("idx_outgoing_webhook_team_id", "OutgoingWebhooks", "TeamId")
+
+ s.CreateIndexIfNotExists("idx_incoming_webhook_update_at", "IncomingWebhooks", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_incoming_webhook_create_at", "IncomingWebhooks", "CreateAt")
+ s.CreateIndexIfNotExists("idx_incoming_webhook_delete_at", "IncomingWebhooks", "DeleteAt")
+
+ s.CreateIndexIfNotExists("idx_outgoing_webhook_update_at", "OutgoingWebhooks", "UpdateAt")
+ s.CreateIndexIfNotExists("idx_outgoing_webhook_create_at", "OutgoingWebhooks", "CreateAt")
+ s.CreateIndexIfNotExists("idx_outgoing_webhook_delete_at", "OutgoingWebhooks", "DeleteAt")
+}
+
+func (s SqlWebhookStore) InvalidateWebhookCache(webhookId string) {
+ webhookCache.Remove(webhookId)
+}
+
+func (s SqlWebhookStore) SaveIncoming(webhook *model.IncomingWebhook) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
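+ // a pre-set Id means the hook already exists; updates must go through UpdateIncoming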
+ if len(webhook.Id) > 0 {
+ result.Err = model.NewAppError("SqlWebhookStore.SaveIncoming", "store.sql_webhooks.save_incoming.existing.app_error", nil, "id="+webhook.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ webhook.PreSave()
+ if result.Err = webhook.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(webhook); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.SaveIncoming", "store.sql_webhooks.save_incoming.app_error", nil, "id="+webhook.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = webhook
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) UpdateIncoming(hook *model.IncomingWebhook) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ hook.UpdateAt = model.GetMillis()
+
+ if _, err := s.GetMaster().Update(hook); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.UpdateIncoming", "store.sql_webhooks.update_incoming.app_error", nil, "id="+hook.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = hook
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) GetIncoming(id string, allowFromCache bool) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if allowFromCache {
+ if cacheItem, ok := webhookCache.Get(id); ok {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheHitCounter("Webhook")
+ }
+ result.Data = cacheItem.(*model.IncomingWebhook)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ } else {
+ if s.metrics != nil {
+ s.metrics.IncrementMemCacheMissCounter("Webhook")
+ }
+ }
+ }
+
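+ // cache miss or cache bypassed: read the hook from a replica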
+ var webhook model.IncomingWebhook
+
+ if err := s.GetReplica().SelectOne(&webhook, "SELECT * FROM IncomingWebhooks WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlWebhookStore.GetIncoming", "store.sql_webhooks.get_incoming.app_error", nil, "id="+id+", err="+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlWebhookStore.GetIncoming", "store.sql_webhooks.get_incoming.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ }
+ }
+
+ if result.Err == nil {
+ webhookCache.AddWithExpiresInSecs(id, &webhook, WEBHOOK_CACHE_SEC)
+ }
+
+ result.Data = &webhook
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) DeleteIncoming(webhookId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
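+ // soft delete: the row is kept but excluded by the DeleteAt = 0 filter used on reads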
+ _, err := s.GetMaster().Exec("Update IncomingWebhooks SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :Id", map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": webhookId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteIncoming", "store.sql_webhooks.delete_incoming.app_error", nil, "id="+webhookId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ s.InvalidateWebhookCache(webhookId)
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
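+// PermanentDeleteIncomingByUser hard-deletes every incoming webhook created by
+// the given user and clears the webhook caches, since any of the removed rows
+// may still be cached.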
+func (s SqlWebhookStore) PermanentDeleteIncomingByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM IncomingWebhooks WHERE UserId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteIncomingByUser", "store.sql_webhooks.permanent_delete_incoming_by_user.app_error", nil, "id="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ ClearWebhookCaches()
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) PermanentDeleteIncomingByChannel(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM IncomingWebhooks WHERE ChannelId = :ChannelId", map[string]interface{}{"ChannelId": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteIncomingByChannel", "store.sql_webhooks.permanent_delete_incoming_by_channel.app_error", nil, "id="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ ClearWebhookCaches()
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
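+// GetIncomingList returns a page of non-deleted incoming webhooks across all
+// teams, bounded by the given offset and limit.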
+func (s SqlWebhookStore) GetIncomingList(offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.IncomingWebhook
+
+ if _, err := s.GetReplica().Select(&webhooks, "SELECT * FROM IncomingWebhooks WHERE DeleteAt = 0 LIMIT :Limit OFFSET :Offset", map[string]interface{}{"Limit": limit, "Offset": offset}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetIncomingList", "store.sql_webhooks.get_incoming_by_user.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) GetIncomingByTeam(teamId string, offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.IncomingWebhook
+
+ if _, err := s.GetReplica().Select(&webhooks, "SELECT * FROM IncomingWebhooks WHERE TeamId = :TeamId AND DeleteAt = 0 LIMIT :Limit OFFSET :Offset", map[string]interface{}{"TeamId": teamId, "Limit": limit, "Offset": offset}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetIncomingByUser", "store.sql_webhooks.get_incoming_by_user.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
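+// GetIncomingByChannel returns every non-deleted incoming webhook attached to
+// the given channel. Unlike the list and team variants, this query is not
+// paginated.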
+func (s SqlWebhookStore) GetIncomingByChannel(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.IncomingWebhook
+
+ if _, err := s.GetReplica().Select(&webhooks, "SELECT * FROM IncomingWebhooks WHERE ChannelId = :ChannelId AND DeleteAt = 0", map[string]interface{}{"ChannelId": channelId}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetIncomingByChannel", "store.sql_webhooks.get_incoming_by_channel.app_error", nil, "channelId="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
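+// SaveOutgoing inserts a new outgoing webhook after running PreSave and
+// validation. Hooks that already carry an Id are rejected; use UpdateOutgoing
+// to modify an existing hook.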
+func (s SqlWebhookStore) SaveOutgoing(webhook *model.OutgoingWebhook) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ if len(webhook.Id) > 0 {
+ result.Err = model.NewAppError("SqlWebhookStore.SaveOutgoing", "store.sql_webhooks.save_outgoing.override.app_error", nil, "id="+webhook.Id, http.StatusBadRequest)
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ webhook.PreSave()
+ if result.Err = webhook.IsValid(); result.Err != nil {
+ storeChannel <- result
+ close(storeChannel)
+ return
+ }
+
+ if err := s.GetMaster().Insert(webhook); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.SaveOutgoing", "store.sql_webhooks.save_outgoing.app_error", nil, "id="+webhook.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = webhook
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) GetOutgoing(id string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhook model.OutgoingWebhook
+
+ if err := s.GetReplica().SelectOne(&webhook, "SELECT * FROM OutgoingWebhooks WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetOutgoing", "store.sql_webhooks.get_outgoing.app_error", nil, "id="+id+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = &webhook
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) GetOutgoingList(offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.OutgoingWebhook
+
+ if _, err := s.GetReplica().Select(&webhooks, "SELECT * FROM OutgoingWebhooks WHERE DeleteAt = 0 LIMIT :Limit OFFSET :Offset", map[string]interface{}{"Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetOutgoingList", "store.sql_webhooks.get_outgoing_by_channel.app_error", nil, "err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
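+// GetOutgoingByChannel returns the non-deleted outgoing webhooks for a
+// channel. Passing a negative offset or limit skips pagination and returns
+// every matching row.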
+func (s SqlWebhookStore) GetOutgoingByChannel(channelId string, offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.OutgoingWebhook
+
+ query := ""
+ if limit < 0 || offset < 0 {
+ query = "SELECT * FROM OutgoingWebhooks WHERE ChannelId = :ChannelId AND DeleteAt = 0"
+ } else {
+ query = "SELECT * FROM OutgoingWebhooks WHERE ChannelId = :ChannelId AND DeleteAt = 0 LIMIT :Limit OFFSET :Offset"
+ }
+
+ if _, err := s.GetReplica().Select(&webhooks, query, map[string]interface{}{"ChannelId": channelId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetOutgoingByChannel", "store.sql_webhooks.get_outgoing_by_channel.app_error", nil, "channelId="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) GetOutgoingByTeam(teamId string, offset, limit int) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ var webhooks []*model.OutgoingWebhook
+
+ query := ""
+ if limit < 0 || offset < 0 {
+ query = "SELECT * FROM OutgoingWebhooks WHERE TeamId = :TeamId AND DeleteAt = 0"
+ } else {
+ query = "SELECT * FROM OutgoingWebhooks WHERE TeamId = :TeamId AND DeleteAt = 0 LIMIT :Limit OFFSET :Offset"
+ }
+
+ if _, err := s.GetReplica().Select(&webhooks, query, map[string]interface{}{"TeamId": teamId, "Offset": offset, "Limit": limit}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.GetOutgoingByTeam", "store.sql_webhooks.get_outgoing_by_team.app_error", nil, "teamId="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ result.Data = webhooks
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
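+// DeleteOutgoing soft-deletes an outgoing webhook by stamping DeleteAt and
+// UpdateAt with the supplied timestamp.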
+func (s SqlWebhookStore) DeleteOutgoing(webhookId string, time int64) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("Update OutgoingWebhooks SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :Id", map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": webhookId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteOutgoing", "store.sql_webhooks.delete_outgoing.app_error", nil, "id="+webhookId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
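+// PermanentDeleteOutgoingByUser hard-deletes every outgoing webhook created by
+// the given user. The webhook cache only holds incoming hooks, so no
+// invalidation is performed here.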
+func (s SqlWebhookStore) PermanentDeleteOutgoingByUser(userId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM OutgoingWebhooks WHERE CreatorId = :UserId", map[string]interface{}{"UserId": userId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteOutgoingByUser", "store.sql_webhooks.permanent_delete_outgoing_by_user.app_error", nil, "id="+userId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) PermanentDeleteOutgoingByChannel(channelId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ _, err := s.GetMaster().Exec("DELETE FROM OutgoingWebhooks WHERE ChannelId = :ChannelId", map[string]interface{}{"ChannelId": channelId})
+ if err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.DeleteOutgoingByChannel", "store.sql_webhooks.permanent_delete_outgoing_by_channel.app_error", nil, "id="+channelId+", err="+err.Error(), http.StatusInternalServerError)
+ }
+
+ ClearWebhookCaches()
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) UpdateOutgoing(hook *model.OutgoingWebhook) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ hook.UpdateAt = model.GetMillis()
+
+ if _, err := s.GetMaster().Update(hook); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.UpdateOutgoing", "store.sql_webhooks.update_outgoing.app_error", nil, "id="+hook.Id+", "+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = hook
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
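+// AnalyticsIncomingCount returns the number of non-deleted incoming webhooks,
+// restricted to a single team when teamId is non-empty.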
+func (s SqlWebhookStore) AnalyticsIncomingCount(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ COUNT(*)
+ FROM
+ IncomingWebhooks
+ WHERE
+ DeleteAt = 0`
+
+ if len(teamId) > 0 {
+ query += " AND TeamId = :TeamId"
+ }
+
+ if v, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.AnalyticsIncomingCount", "store.sql_webhooks.analytics_incoming_count.app_error", nil, "team_id="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (s SqlWebhookStore) AnalyticsOutgoingCount(teamId string) store.StoreChannel {
+ storeChannel := make(store.StoreChannel, 1)
+
+ go func() {
+ result := store.StoreResult{}
+
+ query :=
+ `SELECT
+ COUNT(*)
+ FROM
+ OutgoingWebhooks
+ WHERE
+ DeleteAt = 0`
+
+ if len(teamId) > 0 {
+ query += " AND TeamId = :TeamId"
+ }
+
+ if v, err := s.GetReplica().SelectInt(query, map[string]interface{}{"TeamId": teamId}); err != nil {
+ result.Err = model.NewAppError("SqlWebhookStore.AnalyticsOutgoingCount", "store.sql_webhooks.analytics_outgoing_count.app_error", nil, "team_id="+teamId+", err="+err.Error(), http.StatusInternalServerError)
+ } else {
+ result.Data = v
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
diff --git a/store/sqlstore/webhook_store_test.go b/store/sqlstore/webhook_store_test.go
new file mode 100644
index 000000000..0e6c265ec
--- /dev/null
+++ b/store/sqlstore/webhook_store_test.go
@@ -0,0 +1,516 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package sqlstore
+
+import (
+ "testing"
+ "time"
+
+ "net/http"
+
+ "github.com/mattermost/mattermost-server/model"
+)
+
+func TestWebhookStoreSaveIncoming(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+
+ if err := (<-ss.Webhook().SaveIncoming(o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Webhook().SaveIncoming(o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+}
+
+func TestWebhookStoreUpdateIncoming(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+ previousUpdatedAt := o1.UpdateAt
+
+ o1.DisplayName = "TestHook"
+ time.Sleep(10 * time.Millisecond)
+
+ if result := (<-ss.Webhook().UpdateIncoming(o1)); result.Err != nil {
+ t.Fatal("updation of incoming hook failed", result.Err)
+ } else {
+ if result.Data.(*model.IncomingWebhook).UpdateAt == previousUpdatedAt {
+ t.Fatal("should have updated the UpdatedAt of the hook")
+ }
+
+ if result.Data.(*model.IncomingWebhook).DisplayName != "TestHook" {
+ t.Fatal("display name is not updated")
+ }
+ }
+}
+
+func TestWebhookStoreGetIncoming(t *testing.T) {
+ ss := Setup()
+
+ o1 := buildIncomingWebhook()
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncoming(o1.Id, false); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.IncomingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r1 := <-ss.Webhook().GetIncoming(o1.Id, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.IncomingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if err := (<-ss.Webhook().GetIncoming("123", false)).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ if err := (<-ss.Webhook().GetIncoming("123", true)).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+
+ if err := (<-ss.Webhook().GetIncoming("123", true)).Err; err.StatusCode != http.StatusNotFound {
+ t.Fatal("Should have set the status as not found for missing id")
+ }
+}
+
+func TestWebhookStoreGetIncomingList(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.IncomingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.TeamId = model.NewId()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncomingList(0, 1000); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ found := false
+ hooks := r1.Data.([]*model.IncomingWebhook)
+ for _, hook := range hooks {
+ if hook.Id == o1.Id {
+ found = true
+ }
+ }
+ if !found {
+ t.Fatal("missing webhook")
+ }
+ }
+
+ if result := <-ss.Webhook().GetIncomingList(0, 1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.IncomingWebhook)) != 1 {
+ t.Fatal("only 1 should be returned")
+ }
+ }
+}
+
+func TestWebhookStoreGetIncomingByTeam(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncomingByTeam(o1.TeamId, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.([]*model.IncomingWebhook)[0].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if result := <-ss.Webhook().GetIncomingByTeam("123", 0, 100); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.IncomingWebhook)) != 0 {
+ t.Fatal("no webhooks should have returned")
+ }
+ }
+}
+
+func TestWebhookStoreDeleteIncoming(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncoming(o1.Id, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.IncomingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().DeleteIncoming(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetIncoming(o1.Id, true)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreDeleteIncomingByChannel(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncoming(o1.Id, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.IncomingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().PermanentDeleteIncomingByChannel(o1.ChannelId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetIncoming(o1.Id, true)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreDeleteIncomingByUser(t *testing.T) {
+ ss := Setup()
+ o1 := buildIncomingWebhook()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r1 := <-ss.Webhook().GetIncoming(o1.Id, true); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.IncomingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().PermanentDeleteIncomingByUser(o1.UserId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetIncoming(o1.Id, true)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
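+// buildIncomingWebhook returns a minimal valid incoming webhook pointing at
+// freshly generated channel, user and team ids, for use in the store tests.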
+func buildIncomingWebhook() *model.IncomingWebhook {
+ o1 := &model.IncomingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.TeamId = model.NewId()
+
+ return o1
+}
+
+func TestWebhookStoreSaveOutgoing(t *testing.T) {
+ ss := Setup()
+
+ o1 := model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ if err := (<-ss.Webhook().SaveOutgoing(&o1)).Err; err != nil {
+ t.Fatal("couldn't save item", err)
+ }
+
+ if err := (<-ss.Webhook().SaveOutgoing(&o1)).Err; err == nil {
+ t.Fatal("shouldn't be able to update from save")
+ }
+}
+
+func TestWebhookStoreGetOutgoing(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoing(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.OutgoingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if err := (<-ss.Webhook().GetOutgoing("123")).Err; err == nil {
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreGetOutgoingList(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ o2 := &model.OutgoingWebhook{}
+ o2.ChannelId = model.NewId()
+ o2.CreatorId = model.NewId()
+ o2.TeamId = model.NewId()
+ o2.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o2 = (<-ss.Webhook().SaveOutgoing(o2)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoingList(0, 1000); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ hooks := r1.Data.([]*model.OutgoingWebhook)
+ found1 := false
+ found2 := false
+
+ for _, hook := range hooks {
+ if hook.Id == o1.Id {
+ found1 = true
+ }
+
+ if hook.Id == o2.Id {
+ found2 = true
+ }
+ }
+
+ if !found1 {
+ t.Fatal("missing hook1")
+ }
+ if !found2 {
+ t.Fatal("missing hook2")
+ }
+ }
+
+ if result := <-ss.Webhook().GetOutgoingList(0, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.OutgoingWebhook)) != 2 {
+ t.Fatal("wrong number of hooks returned")
+ }
+ }
+}
+
+func TestWebhookStoreGetOutgoingByChannel(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoingByChannel(o1.ChannelId, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.([]*model.OutgoingWebhook)[0].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if result := <-ss.Webhook().GetOutgoingByChannel("123", -1, -1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.OutgoingWebhook)) != 0 {
+ t.Fatal("no webhooks should have returned")
+ }
+ }
+}
+
+func TestWebhookStoreGetOutgoingByTeam(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoingByTeam(o1.TeamId, 0, 100); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.([]*model.OutgoingWebhook)[0].CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if result := <-ss.Webhook().GetOutgoingByTeam("123", -1, -1); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if len(result.Data.([]*model.OutgoingWebhook)) != 0 {
+ t.Fatal("no webhooks should have returned")
+ }
+ }
+}
+
+func TestWebhookStoreDeleteOutgoing(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoing(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.OutgoingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().DeleteOutgoing(o1.Id, model.GetMillis()); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetOutgoing(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreDeleteOutgoingByChannel(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoing(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.OutgoingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().PermanentDeleteOutgoingByChannel(o1.ChannelId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetOutgoing(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreDeleteOutgoingByUser(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r1 := <-ss.Webhook().GetOutgoing(o1.Id); r1.Err != nil {
+ t.Fatal(r1.Err)
+ } else {
+ if r1.Data.(*model.OutgoingWebhook).CreateAt != o1.CreateAt {
+ t.Fatal("invalid returned webhook")
+ }
+ }
+
+ if r2 := <-ss.Webhook().PermanentDeleteOutgoingByUser(o1.CreatorId); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+
+ if r3 := (<-ss.Webhook().GetOutgoing(o1.Id)); r3.Err == nil {
+ t.Log(r3.Data)
+ t.Fatal("Missing id should have failed")
+ }
+}
+
+func TestWebhookStoreUpdateOutgoing(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ o1.Token = model.NewId()
+
+ if r2 := <-ss.Webhook().UpdateOutgoing(o1); r2.Err != nil {
+ t.Fatal(r2.Err)
+ }
+}
+
+func TestWebhookStoreCountIncoming(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.IncomingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.UserId = model.NewId()
+ o1.TeamId = model.NewId()
+
+ o1 = (<-ss.Webhook().SaveIncoming(o1)).Data.(*model.IncomingWebhook)
+
+ if r := <-ss.Webhook().AnalyticsIncomingCount(""); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ if r.Data.(int64) == 0 {
+ t.Fatal("should have at least 1 incoming hook")
+ }
+ }
+}
+
+func TestWebhookStoreCountOutgoing(t *testing.T) {
+ ss := Setup()
+
+ o1 := &model.OutgoingWebhook{}
+ o1.ChannelId = model.NewId()
+ o1.CreatorId = model.NewId()
+ o1.TeamId = model.NewId()
+ o1.CallbackURLs = []string{"http://nowhere.com/"}
+
+ o1 = (<-ss.Webhook().SaveOutgoing(o1)).Data.(*model.OutgoingWebhook)
+
+ if r := <-ss.Webhook().AnalyticsOutgoingCount(""); r.Err != nil {
+ t.Fatal(r.Err)
+ } else {
+ if r.Data.(int64) == 0 {
+ t.Fatal("should have at least 1 outgoing hook")
+ }
+ }
+}