author    Jesse Hallam <jesse.hallam@gmail.com>    2018-03-26 17:55:35 -0400
committer GitHub <noreply@github.com>              2018-03-26 17:55:35 -0400
commit    8491ba5740e2d9942b2612ce06aef90bb10ad4c0 (patch)
tree      b58f596fef50e6a3bc79944e121476861047c250 /store
parent    4a69c277a620308959ad6870d4e7c8240f9a166d (diff)
Relax 4k post message limit (#8478)
* MM-9661: rename POST_MESSAGE_MAX_RUNES to POST_MESSAGE_MAX_RUNES_V1

* MM-9661: s/4000/POST_MESSAGE_MAX_RUNES_V1/ in tests

* MM-9661: introduce POST_MESSAGE_MAX_RUNES_V2

* MM-9661: migrate Postgres Posts.Message column to TEXT from VARCHAR(4000)

  This is safe to do in a production instance since the underlying type is
  not changing. We explicitly don't do this automatically for MySQL, but
  also don't need to, since the ORM would have already created a TEXT
  column for MySQL in that case.

* MM-9661: emit MaxPostSize in client config

  This value remains unconfigurable at this time, but exposes the current
  limit to the client. The limit remains at 4k in this commit.

* MM-9661: introduce and use SqlPostStore.GetMaxPostSize

  Enforce a byte limitation in the database, and use 1/4 of that value as
  the rune count limitation (assuming a worst-case UTF-8 representation of
  four bytes per rune).

* move maxPostSizeCached, lastPostsCache and lastPostTimeCache out of the
  global context and onto the SqlPostStore

* address feedback from code review:
  * ensure sqlstore unit tests are actually being run
  * move global caches into SqlPostStore
  * leverage sync.Once to address a race condition

* modify upgrade semantics to match new db semantics

  gorp's behaviour when creating columns with a maximum length differs
  between Postgres and MySQL:

  * Postgres
    * gorp uses TEXT for string columns without a maximum length
    * gorp uses VARCHAR(N) for string columns with a maximum length of N
  * MySQL
    * gorp uses TEXT for string columns with a maximum length >= 256
    * gorp uses VARCHAR(N) for string columns with a maximum length of N
    * gorp defaults to a maximum length of 255, implying VARCHAR(255)

  So the Message column has been TEXT on MySQL but VARCHAR(4000) on
  Postgres. With the new, longer limit of 65535, and without changes to
  gorp, the expected behaviour is TEXT on MySQL and VARCHAR(65535) on
  Postgres. This commit makes the upgrade semantics match the new database
  semantics. Ideally, we'd revisit the gorp behaviour at a later time.

* allow TestMaxPostSize test cases to actually run in parallel

* default maxPostSizeCached to POST_MESSAGE_MAX_RUNES_V1 in case the once
  initializer panics

* fix casting error

* MM-9661: skip the schema migration for Postgres

  It turns out that resizing a VARCHAR column requires a table rewrite in
  some versions of Postgres, while migrating VARCHAR to TEXT does not.
  Given the increasing complexity, let's defer the migration to the end
  user instead.
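For illustration, the sizing rule above reduces to a few lines of Go. A minimal, standalone sketch (the helper name and literal values here are hypothetical; the store's actual constants appear in the diff below):

    package main

    import (
    	"fmt"
    	"unicode/utf8"
    )

    // maxRunesForColumn derives a conservative rune limit from a column's byte
    // capacity, assuming the UTF-8 worst case of utf8.UTFMax (4) bytes per rune.
    func maxRunesForColumn(maxBytes, floor int) int {
    	maxRunes := maxBytes / utf8.UTFMax
    	// Preserve backwards compatibility: never drop below the historical limit.
    	if maxRunes < floor {
    		maxRunes = floor
    	}
    	return maxRunes
    }

    func main() {
    	// A 65535-byte TEXT column with the historical 4000-rune floor yields 16383.
    	fmt.Println(maxRunesForColumn(65535, 4000))
    }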
Diffstat (limited to 'store')
-rw-r--r--  store/sqlstore/post_store.go        205
-rw-r--r--  store/store.go                        1
-rw-r--r--  store/storetest/mocks/PostStore.go   15
-rw-r--r--  store/storetest/post_store.go         6
4 files changed, 170 insertions, 57 deletions
diff --git a/store/sqlstore/post_store.go b/store/sqlstore/post_store.go
index 3ff9a3e1b..182cf4891 100644
--- a/store/sqlstore/post_store.go
+++ b/store/sqlstore/post_store.go
@@ -4,13 +4,13 @@
package sqlstore
import (
+ "bytes"
"fmt"
"net/http"
"regexp"
"strconv"
"strings"
-
- "bytes"
+ "sync"
l4g "github.com/alecthomas/log4go"
"github.com/mattermost/mattermost-server/einterfaces"
@@ -21,7 +21,11 @@ import (
type SqlPostStore struct {
SqlStore
- metrics einterfaces.MetricsInterface
+ metrics einterfaces.MetricsInterface
+ lastPostTimeCache *utils.Cache
+ lastPostsCache *utils.Cache
+ maxPostSizeOnce sync.Once
+ maxPostSizeCached int
}
const (
@@ -32,12 +36,9 @@ const (
LAST_POSTS_CACHE_SEC = 900 // 15 minutes
)
-var lastPostTimeCache = utils.NewLru(LAST_POST_TIME_CACHE_SIZE)
-var lastPostsCache = utils.NewLru(LAST_POSTS_CACHE_SIZE)
-
-func (s SqlPostStore) ClearCaches() {
- lastPostTimeCache.Purge()
- lastPostsCache.Purge()
+func (s *SqlPostStore) ClearCaches() {
+ s.lastPostTimeCache.Purge()
+ s.lastPostsCache.Purge()
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("Last Post Time - Purge")
@@ -47,8 +48,11 @@ func (s SqlPostStore) ClearCaches() {
func NewSqlPostStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) store.PostStore {
s := &SqlPostStore{
- SqlStore: sqlStore,
- metrics: metrics,
+ SqlStore: sqlStore,
+ metrics: metrics,
+ lastPostTimeCache: utils.NewLru(LAST_POST_TIME_CACHE_SIZE),
+ lastPostsCache: utils.NewLru(LAST_POSTS_CACHE_SIZE),
+ maxPostSizeCached: model.POST_MESSAGE_MAX_RUNES_V1,
}
for _, db := range sqlStore.GetAllConns() {
@@ -59,18 +63,18 @@ func NewSqlPostStore(sqlStore SqlStore, metrics einterfaces.MetricsInterface) st
table.ColMap("RootId").SetMaxSize(26)
table.ColMap("ParentId").SetMaxSize(26)
table.ColMap("OriginalId").SetMaxSize(26)
- table.ColMap("Message").SetMaxSize(4000)
+ table.ColMap("Message").SetMaxSize(model.POST_MESSAGE_MAX_BYTES_V2)
table.ColMap("Type").SetMaxSize(26)
table.ColMap("Hashtags").SetMaxSize(1000)
table.ColMap("Props").SetMaxSize(8000)
- table.ColMap("Filenames").SetMaxSize(4000)
+ table.ColMap("Filenames").SetMaxSize(model.POST_FILENAMES_MAX_RUNES)
table.ColMap("FileIds").SetMaxSize(150)
}
return s
}
-func (s SqlPostStore) CreateIndexesIfNotExists() {
+func (s *SqlPostStore) CreateIndexesIfNotExists() {
s.CreateIndexIfNotExists("idx_posts_update_at", "Posts", "UpdateAt")
s.CreateIndexIfNotExists("idx_posts_create_at", "Posts", "CreateAt")
s.CreateIndexIfNotExists("idx_posts_delete_at", "Posts", "DeleteAt")
@@ -86,15 +90,23 @@ func (s SqlPostStore) CreateIndexesIfNotExists() {
s.CreateFullTextIndexIfNotExists("idx_posts_hashtags_txt", "Posts", "Hashtags")
}
-func (s SqlPostStore) Save(post *model.Post) store.StoreChannel {
+func (s *SqlPostStore) Save(post *model.Post) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
if len(post.Id) > 0 {
result.Err = model.NewAppError("SqlPostStore.Save", "store.sql_post.save.existing.app_error", nil, "id="+post.Id, http.StatusBadRequest)
return
}
+ var maxPostSize int
+ if res := <-s.GetMaxPostSize(); res.Err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Save", "store.sql_post.save.app_error", nil, "id="+post.Id+", "+res.Err.Error(), http.StatusInternalServerError)
+ return
+ } else {
+ maxPostSize = res.Data.(int)
+ }
+
post.PreSave()
- if result.Err = post.IsValid(); result.Err != nil {
+ if result.Err = post.IsValid(maxPostSize); result.Err != nil {
return
}
@@ -122,7 +134,7 @@ func (s SqlPostStore) Save(post *model.Post) store.StoreChannel {
})
}
-func (s SqlPostStore) Update(newPost *model.Post, oldPost *model.Post) store.StoreChannel {
+func (s *SqlPostStore) Update(newPost *model.Post, oldPost *model.Post) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
newPost.UpdateAt = model.GetMillis()
newPost.PreCommit()
@@ -133,7 +145,15 @@ func (s SqlPostStore) Update(newPost *model.Post, oldPost *model.Post) store.Sto
oldPost.Id = model.NewId()
oldPost.PreCommit()
- if result.Err = newPost.IsValid(); result.Err != nil {
+ var maxPostSize int
+ if res := <-s.GetMaxPostSize(); res.Err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Update", "store.sql_post.update.app_error", nil, "id="+newPost.Id+", "+res.Err.Error(), http.StatusInternalServerError)
+ return
+ } else {
+ maxPostSize = res.Data.(int)
+ }
+
+ if result.Err = newPost.IsValid(maxPostSize); result.Err != nil {
return
}
@@ -155,11 +175,19 @@ func (s SqlPostStore) Update(newPost *model.Post, oldPost *model.Post) store.Sto
})
}
-func (s SqlPostStore) Overwrite(post *model.Post) store.StoreChannel {
+func (s *SqlPostStore) Overwrite(post *model.Post) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
post.UpdateAt = model.GetMillis()
- if result.Err = post.IsValid(); result.Err != nil {
+ var maxPostSize int
+ if res := <-s.GetMaxPostSize(); res.Err != nil {
+ result.Err = model.NewAppError("SqlPostStore.Overwrite", "store.sql_post.overwrite.app_error", nil, "id="+post.Id+", "+res.Err.Error(), http.StatusInternalServerError)
+ return
+ } else {
+ maxPostSize = res.Data.(int)
+ }
+
+ if result.Err = post.IsValid(maxPostSize); result.Err != nil {
return
}
@@ -171,7 +199,7 @@ func (s SqlPostStore) Overwrite(post *model.Post) store.StoreChannel {
})
}
-func (s SqlPostStore) GetFlaggedPosts(userId string, offset int, limit int) store.StoreChannel {
+func (s *SqlPostStore) GetFlaggedPosts(userId string, offset int, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
pl := model.NewPostList()
@@ -189,7 +217,7 @@ func (s SqlPostStore) GetFlaggedPosts(userId string, offset int, limit int) stor
})
}
-func (s SqlPostStore) GetFlaggedPostsForTeam(userId, teamId string, offset int, limit int) store.StoreChannel {
+func (s *SqlPostStore) GetFlaggedPostsForTeam(userId, teamId string, offset int, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
pl := model.NewPostList()
@@ -234,7 +262,7 @@ func (s SqlPostStore) GetFlaggedPostsForTeam(userId, teamId string, offset int,
})
}
-func (s SqlPostStore) GetFlaggedPostsForChannel(userId, channelId string, offset int, limit int) store.StoreChannel {
+func (s *SqlPostStore) GetFlaggedPostsForChannel(userId, channelId string, offset int, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
pl := model.NewPostList()
@@ -263,7 +291,7 @@ func (s SqlPostStore) GetFlaggedPostsForChannel(userId, channelId string, offset
})
}
-func (s SqlPostStore) Get(id string) store.StoreChannel {
+func (s *SqlPostStore) Get(id string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
pl := model.NewPostList()
@@ -308,7 +336,7 @@ func (s SqlPostStore) Get(id string) store.StoreChannel {
})
}
-func (s SqlPostStore) GetSingle(id string) store.StoreChannel {
+func (s *SqlPostStore) GetSingle(id string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var post model.Post
err := s.GetReplica().SelectOne(&post, "SELECT * FROM Posts WHERE Id = :Id AND DeleteAt = 0", map[string]interface{}{"Id": id})
@@ -325,12 +353,12 @@ type etagPosts struct {
UpdateAt int64
}
-func (s SqlPostStore) InvalidateLastPostTimeCache(channelId string) {
- lastPostTimeCache.Remove(channelId)
+func (s *SqlPostStore) InvalidateLastPostTimeCache(channelId string) {
+ s.lastPostTimeCache.Remove(channelId)
// Keys are "{channelid}{limit}" and caching only occurs on limits of 30 and 60
- lastPostsCache.Remove(channelId + "30")
- lastPostsCache.Remove(channelId + "60")
+ s.lastPostsCache.Remove(channelId + "30")
+ s.lastPostsCache.Remove(channelId + "60")
if s.metrics != nil {
s.metrics.IncrementMemCacheInvalidationCounter("Last Post Time - Remove by Channel Id")
@@ -338,10 +366,10 @@ func (s SqlPostStore) InvalidateLastPostTimeCache(channelId string) {
}
}
-func (s SqlPostStore) GetEtag(channelId string, allowFromCache bool) store.StoreChannel {
+func (s *SqlPostStore) GetEtag(channelId string, allowFromCache bool) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
if allowFromCache {
- if cacheItem, ok := lastPostTimeCache.Get(channelId); ok {
+ if cacheItem, ok := s.lastPostTimeCache.Get(channelId); ok {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("Last Post Time")
}
@@ -366,11 +394,11 @@ func (s SqlPostStore) GetEtag(channelId string, allowFromCache bool) store.Store
result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, et.UpdateAt)
}
- lastPostTimeCache.AddWithExpiresInSecs(channelId, et.UpdateAt, LAST_POST_TIME_CACHE_SEC)
+ s.lastPostTimeCache.AddWithExpiresInSecs(channelId, et.UpdateAt, LAST_POST_TIME_CACHE_SEC)
})
}
-func (s SqlPostStore) Delete(postId string, time int64) store.StoreChannel {
+func (s *SqlPostStore) Delete(postId string, time int64) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
_, err := s.GetMaster().Exec("Update Posts SET DeleteAt = :DeleteAt, UpdateAt = :UpdateAt WHERE Id = :Id OR RootId = :RootId", map[string]interface{}{"DeleteAt": time, "UpdateAt": time, "Id": postId, "RootId": postId})
if err != nil {
@@ -379,7 +407,7 @@ func (s SqlPostStore) Delete(postId string, time int64) store.StoreChannel {
})
}
-func (s SqlPostStore) permanentDelete(postId string) store.StoreChannel {
+func (s *SqlPostStore) permanentDelete(postId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
_, err := s.GetMaster().Exec("DELETE FROM Posts WHERE Id = :Id OR RootId = :RootId", map[string]interface{}{"Id": postId, "RootId": postId})
if err != nil {
@@ -388,7 +416,7 @@ func (s SqlPostStore) permanentDelete(postId string) store.StoreChannel {
})
}
-func (s SqlPostStore) permanentDeleteAllCommentByUser(userId string) store.StoreChannel {
+func (s *SqlPostStore) permanentDeleteAllCommentByUser(userId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
_, err := s.GetMaster().Exec("DELETE FROM Posts WHERE UserId = :UserId AND RootId != ''", map[string]interface{}{"UserId": userId})
if err != nil {
@@ -397,7 +425,7 @@ func (s SqlPostStore) permanentDeleteAllCommentByUser(userId string) store.Store
})
}
-func (s SqlPostStore) PermanentDeleteByUser(userId string) store.StoreChannel {
+func (s *SqlPostStore) PermanentDeleteByUser(userId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
// First attempt to delete all the comments for a user
if r := <-s.permanentDeleteAllCommentByUser(userId); r.Err != nil {
@@ -437,7 +465,7 @@ func (s SqlPostStore) PermanentDeleteByUser(userId string) store.StoreChannel {
})
}
-func (s SqlPostStore) PermanentDeleteByChannel(channelId string) store.StoreChannel {
+func (s *SqlPostStore) PermanentDeleteByChannel(channelId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
if _, err := s.GetMaster().Exec("DELETE FROM Posts WHERE ChannelId = :ChannelId", map[string]interface{}{"ChannelId": channelId}); err != nil {
result.Err = model.NewAppError("SqlPostStore.PermanentDeleteByChannel", "store.sql_post.permanent_delete_by_channel.app_error", nil, "channel_id="+channelId+", "+err.Error(), http.StatusInternalServerError)
@@ -445,7 +473,7 @@ func (s SqlPostStore) PermanentDeleteByChannel(channelId string) store.StoreChan
})
}
-func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFromCache bool) store.StoreChannel {
+func (s *SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFromCache bool) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
if limit > 1000 {
result.Err = model.NewAppError("SqlPostStore.GetLinearPosts", "store.sql_post.get_posts.app_error", nil, "channelId="+channelId, http.StatusBadRequest)
@@ -454,7 +482,7 @@ func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFro
// Caching only occurs on limits of 30 and 60, the common limits requested by MM clients
if allowFromCache && offset == 0 && (limit == 60 || limit == 30) {
- if cacheItem, ok := lastPostsCache.Get(fmt.Sprintf("%s%v", channelId, limit)); ok {
+ if cacheItem, ok := s.lastPostsCache.Get(fmt.Sprintf("%s%v", channelId, limit)); ok {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("Last Posts Cache")
}
@@ -498,7 +526,7 @@ func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFro
// Caching only occurs on limits of 30 and 60, the common limits requested by MM clients
if offset == 0 && (limit == 60 || limit == 30) {
- lastPostsCache.AddWithExpiresInSecs(fmt.Sprintf("%s%v", channelId, limit), list, LAST_POSTS_CACHE_SEC)
+ s.lastPostsCache.AddWithExpiresInSecs(fmt.Sprintf("%s%v", channelId, limit), list, LAST_POSTS_CACHE_SEC)
}
result.Data = list
@@ -506,12 +534,12 @@ func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFro
})
}
-func (s SqlPostStore) GetPostsSince(channelId string, time int64, allowFromCache bool) store.StoreChannel {
+func (s *SqlPostStore) GetPostsSince(channelId string, time int64, allowFromCache bool) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
if allowFromCache {
// If the last post in the channel's time is less than or equal to the time we are getting posts since,
// we can safely return no posts.
- if cacheItem, ok := lastPostTimeCache.Get(channelId); ok && cacheItem.(int64) <= time {
+ if cacheItem, ok := s.lastPostTimeCache.Get(channelId); ok && cacheItem.(int64) <= time {
if s.metrics != nil {
s.metrics.IncrementMemCacheHitCounter("Last Post Time")
}
@@ -576,22 +604,22 @@ func (s SqlPostStore) GetPostsSince(channelId string, time int64, allowFromCache
}
}
- lastPostTimeCache.AddWithExpiresInSecs(channelId, latestUpdate, LAST_POST_TIME_CACHE_SEC)
+ s.lastPostTimeCache.AddWithExpiresInSecs(channelId, latestUpdate, LAST_POST_TIME_CACHE_SEC)
result.Data = list
}
})
}
-func (s SqlPostStore) GetPostsBefore(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
+func (s *SqlPostStore) GetPostsBefore(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
return s.getPostsAround(channelId, postId, numPosts, offset, true)
}
-func (s SqlPostStore) GetPostsAfter(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
+func (s *SqlPostStore) GetPostsAfter(channelId string, postId string, numPosts int, offset int) store.StoreChannel {
return s.getPostsAround(channelId, postId, numPosts, offset, false)
}
-func (s SqlPostStore) getPostsAround(channelId string, postId string, numPosts int, offset int, before bool) store.StoreChannel {
+func (s *SqlPostStore) getPostsAround(channelId string, postId string, numPosts int, offset int, before bool) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var direction string
var sort string
@@ -672,7 +700,7 @@ func (s SqlPostStore) getPostsAround(channelId string, postId string, numPosts i
})
}
-func (s SqlPostStore) getRootPosts(channelId string, offset int, limit int) store.StoreChannel {
+func (s *SqlPostStore) getRootPosts(channelId string, offset int, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var posts []*model.Post
_, err := s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE ChannelId = :ChannelId AND DeleteAt = 0 ORDER BY CreateAt DESC LIMIT :Limit OFFSET :Offset", map[string]interface{}{"ChannelId": channelId, "Offset": offset, "Limit": limit})
@@ -684,7 +712,7 @@ func (s SqlPostStore) getRootPosts(channelId string, offset int, limit int) stor
})
}
-func (s SqlPostStore) getParentsPosts(channelId string, offset int, limit int) store.StoreChannel {
+func (s *SqlPostStore) getParentsPosts(channelId string, offset int, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var posts []*model.Post
_, err := s.GetReplica().Select(&posts, `
@@ -771,7 +799,7 @@ var specialSearchChar = []string{
":",
}
-func (s SqlPostStore) Search(teamId string, userId string, params *model.SearchParams) store.StoreChannel {
+func (s *SqlPostStore) Search(teamId string, userId string, params *model.SearchParams) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
queryParams := map[string]interface{}{
"TeamId": teamId,
@@ -945,7 +973,7 @@ func (s SqlPostStore) Search(teamId string, userId string, params *model.SearchP
})
}
-func (s SqlPostStore) AnalyticsUserCountsWithPostsByDay(teamId string) store.StoreChannel {
+func (s *SqlPostStore) AnalyticsUserCountsWithPostsByDay(teamId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
query :=
`SELECT DISTINCT
@@ -998,7 +1026,7 @@ func (s SqlPostStore) AnalyticsUserCountsWithPostsByDay(teamId string) store.Sto
})
}
-func (s SqlPostStore) AnalyticsPostCountsByDay(teamId string) store.StoreChannel {
+func (s *SqlPostStore) AnalyticsPostCountsByDay(teamId string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
query :=
`SELECT
@@ -1053,7 +1081,7 @@ func (s SqlPostStore) AnalyticsPostCountsByDay(teamId string) store.StoreChannel
})
}
-func (s SqlPostStore) AnalyticsPostCount(teamId string, mustHaveFile bool, mustHaveHashtag bool) store.StoreChannel {
+func (s *SqlPostStore) AnalyticsPostCount(teamId string, mustHaveFile bool, mustHaveHashtag bool) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
query :=
`SELECT
@@ -1084,7 +1112,7 @@ func (s SqlPostStore) AnalyticsPostCount(teamId string, mustHaveFile bool, mustH
})
}
-func (s SqlPostStore) GetPostsCreatedAt(channelId string, time int64) store.StoreChannel {
+func (s *SqlPostStore) GetPostsCreatedAt(channelId string, time int64) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
query := `SELECT * FROM Posts WHERE CreateAt = :CreateAt AND ChannelId = :ChannelId`
@@ -1099,7 +1127,7 @@ func (s SqlPostStore) GetPostsCreatedAt(channelId string, time int64) store.Stor
})
}
-func (s SqlPostStore) GetPostsByIds(postIds []string) store.StoreChannel {
+func (s *SqlPostStore) GetPostsByIds(postIds []string) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
keys := bytes.Buffer{}
params := make(map[string]interface{})
@@ -1127,7 +1155,7 @@ func (s SqlPostStore) GetPostsByIds(postIds []string) store.StoreChannel {
})
}
-func (s SqlPostStore) GetPostsBatchForIndexing(startTime int64, endTime int64, limit int) store.StoreChannel {
+func (s *SqlPostStore) GetPostsBatchForIndexing(startTime int64, endTime int64, limit int) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var posts []*model.PostForIndexing
_, err1 := s.GetSearchReplica().Select(&posts,
@@ -1167,7 +1195,7 @@ func (s SqlPostStore) GetPostsBatchForIndexing(startTime int64, endTime int64, l
})
}
-func (s SqlPostStore) PermanentDeleteBatch(endTime int64, limit int64) store.StoreChannel {
+func (s *SqlPostStore) PermanentDeleteBatch(endTime int64, limit int64) store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var query string
if s.DriverName() == "postgres" {
@@ -1191,7 +1219,7 @@ func (s SqlPostStore) PermanentDeleteBatch(endTime int64, limit int64) store.Sto
})
}
-func (s SqlPostStore) GetOldest() store.StoreChannel {
+func (s *SqlPostStore) GetOldest() store.StoreChannel {
return store.Do(func(result *store.StoreResult) {
var post model.Post
err := s.GetReplica().SelectOne(&post, "SELECT * FROM Posts ORDER BY CreateAt LIMIT 1")
@@ -1202,3 +1230,66 @@ func (s SqlPostStore) GetOldest() store.StoreChannel {
result.Data = &post
})
}
+
+func (s *SqlPostStore) determineMaxPostSize() int {
+ var maxPostSize int = model.POST_MESSAGE_MAX_RUNES_V1
+ var maxPostSizeBytes int32
+
+ if s.DriverName() == model.DATABASE_DRIVER_POSTGRES {
+ // The Post.Message column in Postgres has historically been VARCHAR(4000), but
+ // may be manually enlarged to support longer posts.
+ if err := s.GetReplica().SelectOne(&maxPostSizeBytes, `
+ SELECT
+ COALESCE(character_maximum_length, 0)
+ FROM
+ information_schema.columns
+ WHERE
+ table_name = 'posts'
+ AND column_name = 'message'
+ `); err != nil {
+ l4g.Error(utils.T("store.sql_post.query_max_post_size.error") + err.Error())
+ }
+ } else if s.DriverName() == model.DATABASE_DRIVER_MYSQL {
+ // The Post.Message column in MySQL has historically been TEXT, with a maximum
+ // limit of 65535.
+ if err := s.GetReplica().SelectOne(&maxPostSizeBytes, `
+ SELECT
+ COALESCE(CHARACTER_MAXIMUM_LENGTH, 0)
+ FROM
+ INFORMATION_SCHEMA.COLUMNS
+ WHERE
+ table_schema = DATABASE()
+ AND table_name = 'Posts'
+ AND column_name = 'Message'
+ LIMIT 0, 1
+ `); err != nil {
+ l4g.Error(utils.T("store.sql_post.query_max_post_size.error") + err.Error())
+ }
+ } else {
+ l4g.Warn(utils.T("store.sql_post.query_max_post_size.unrecognized_driver"))
+ }
+
+ l4g.Trace(utils.T("store.sql_post.query_max_post_size.max_post_size_bytes"), maxPostSizeBytes)
+
+ // Assume a worst-case representation of four bytes per rune.
+ maxPostSize = int(maxPostSizeBytes) / 4
+
+ // To maintain backwards compatibility, don't yield a maximum post
+ // size smaller than the previous limit, even though it wasn't
+ // actually possible to store 4000 runes in all cases.
+ if maxPostSize < model.POST_MESSAGE_MAX_RUNES_V1 {
+ maxPostSize = model.POST_MESSAGE_MAX_RUNES_V1
+ }
+
+ return maxPostSize
+}
+
+// GetMaxPostSize returns the maximum number of runes that may be stored in a post.
+func (s *SqlPostStore) GetMaxPostSize() store.StoreChannel {
+ return store.Do(func(result *store.StoreResult) {
+ s.maxPostSizeOnce.Do(func() {
+ s.maxPostSizeCached = s.determineMaxPostSize()
+ })
+ result.Data = s.maxPostSizeCached
+ })
+}
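As the Save, Update, and Overwrite hunks above show, callers consume GetMaxPostSize through the usual StoreChannel pattern: the receive blocks until the once-guarded lookup completes, and subsequent calls return the cached value. A hedged sketch of a caller, assuming an already-initialized store.Store named ss and imports of "fmt" and the store package:

    // Sketch: report the effective post size limit, given an initialized store.
    func reportMaxPostSize(ss store.Store) {
    	res := <-ss.Post().GetMaxPostSize()
    	if res.Err != nil {
    		return // GetMaxPostSize never sets Err today, but honor the pattern.
    	}
    	fmt.Printf("posts may contain up to %d runes\n", res.Data.(int))
    }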
diff --git a/store/store.go b/store/store.go
index f070a45db..773dfff02 100644
--- a/store/store.go
+++ b/store/store.go
@@ -198,6 +198,7 @@ type PostStore interface {
GetPostsBatchForIndexing(startTime int64, endTime int64, limit int) StoreChannel
PermanentDeleteBatch(endTime int64, limit int64) StoreChannel
GetOldest() StoreChannel
+ GetMaxPostSize() StoreChannel
}
type UserStore interface {
diff --git a/store/storetest/mocks/PostStore.go b/store/storetest/mocks/PostStore.go
index c405d5030..bdd0d1d16 100644
--- a/store/storetest/mocks/PostStore.go
+++ b/store/storetest/mocks/PostStore.go
@@ -422,3 +422,18 @@ func (_m *PostStore) Update(newPost *model.Post, oldPost *model.Post) store.Stor
return r0
}
+
+func (_m *PostStore) GetMaxPostSize() store.StoreChannel {
+ ret := _m.Called()
+
+ var r0 store.StoreChannel
+ if rf, ok := ret.Get(0).(func() store.StoreChannel); ok {
+ r0 = rf()
+ } else {
+ if ret.Get(0) != nil {
+ r0 = ret.Get(0).(store.StoreChannel)
+ }
+ }
+
+ return r0
+}
diff --git a/store/storetest/post_store.go b/store/storetest/post_store.go
index 91fc40213..44ce47d9d 100644
--- a/store/storetest/post_store.go
+++ b/store/storetest/post_store.go
@@ -43,6 +43,7 @@ func TestPostStore(t *testing.T, ss store.Store) {
t.Run("GetPostsBatchForIndexing", func(t *testing.T) { testPostStoreGetPostsBatchForIndexing(t, ss) })
t.Run("PermanentDeleteBatch", func(t *testing.T) { testPostStorePermanentDeleteBatch(t, ss) })
t.Run("GetOldest", func(t *testing.T) { testPostStoreGetOldest(t, ss) })
+ t.Run("TestGetMaxPostSize", func(t *testing.T) { testGetMaxPostSize(t, ss) })
}
func testPostStoreSave(t *testing.T, ss store.Store) {
@@ -1783,3 +1784,8 @@ func testPostStoreGetOldest(t *testing.T, ss store.Store) {
assert.EqualValues(t, o2.Id, r1.Id)
}
+
+func testGetMaxPostSize(t *testing.T, ss store.Store) {
+ assert.Equal(t, model.POST_MESSAGE_MAX_RUNES_V2, (<-ss.Post().GetMaxPostSize()).Data.(int))
+ assert.Equal(t, model.POST_MESSAGE_MAX_RUNES_V2, (<-ss.Post().GetMaxPostSize()).Data.(int))
+}