-rw-r--r--  Makefile | 19
-rw-r--r--  api/channel.go | 5
-rw-r--r--  api/post_test.go | 39
-rw-r--r--  api4/elasticsearch.go | 7
-rw-r--r--  api4/job.go | 14
-rw-r--r--  api4/job_test.go | 30
-rw-r--r--  api4/post_test.go | 156
-rw-r--r--  app/admin.go | 3
-rw-r--r--  app/elasticsearch.go | 15
-rw-r--r--  app/file.go | 25
-rw-r--r--  app/job.go | 16
-rw-r--r--  app/job_test.go | 18
-rw-r--r--  app/post.go | 12
-rw-r--r--  app/webhook.go | 106
-rw-r--r--  cmd/platform/channel.go | 4
-rw-r--r--  cmd/platform/server.go | 11
-rw-r--r--  config/config.json | 4
-rw-r--r--  einterfaces/elasticsearch.go | 18
-rw-r--r--  einterfaces/jobs/data_retention.go | 4
-rw-r--r--  einterfaces/jobs/elasticsearch.go | 22
-rw-r--r--  i18n/en.json | 60
-rw-r--r--  jobs/jobs.go | 136
-rw-r--r--  jobs/jobs_watcher.go | 92
-rw-r--r--  jobs/jobserver/jobserver.go | 15
-rw-r--r--  jobs/schedulers.go | 68
-rw-r--r--  jobs/server.go | 31
-rw-r--r--  jobs/testjob.go | 54
-rw-r--r--  jobs/testscheduler.go | 58
-rw-r--r--  jobs/testworker.go | 104
-rw-r--r--  jobs/workers.go | 96
-rw-r--r--  model/client4.go | 14
-rw-r--r--  model/command_response.go | 23
-rw-r--r--  model/command_response_test.go | 14
-rw-r--r--  model/config.go | 18
-rw-r--r--  model/incoming_webhook.go | 52
-rw-r--r--  model/incoming_webhook_test.go | 14
-rw-r--r--  model/job.go | 79
-rw-r--r--  model/job_status.go | 59
-rw-r--r--  model/license.go | 10
-rw-r--r--  model/license_test.go | 8
-rw-r--r--  model/outgoing_webhook.go | 63
-rw-r--r--  model/outgoing_webhook_test.go | 2
-rw-r--r--  model/post.go | 6
-rw-r--r--  model/slack_attachment.go | 52
-rw-r--r--  model/slack_attachment_test.go | 38
-rw-r--r--  store/layered_store.go | 4
-rw-r--r--  store/sql_channel_store.go | 2
-rw-r--r--  store/sql_job_status_store.go | 190
-rw-r--r--  store/sql_job_status_store_test.go | 151
-rw-r--r--  store/sql_job_store.go | 327
-rw-r--r--  store/sql_job_store_test.go | 341
-rw-r--r--  store/sql_post_store.go | 44
-rw-r--r--  store/sql_post_store_test.go | 69
-rw-r--r--  store/sql_store.go | 1
-rw-r--r--  store/sql_supplier.go | 10
-rw-r--r--  store/sql_upgrade.go | 10
-rw-r--r--  store/store.go | 11
-rw-r--r--  utils/config.go | 11
-rw-r--r--  webapp/actions/admin_actions.jsx | 13
-rw-r--r--  webapp/actions/file_actions.jsx | 6
-rw-r--r--  webapp/components/admin_console/admin_sidebar.jsx | 16
-rw-r--r--  webapp/components/admin_console/elasticsearch_settings.jsx | 280
-rw-r--r--  webapp/components/admin_console/manage_teams_modal/manage_teams_dropdown.jsx | 4
-rw-r--r--  webapp/components/admin_console/push_settings.jsx | 2
-rw-r--r--  webapp/components/file_preview.jsx | 4
-rw-r--r--  webapp/components/integrations/components/abstract_outgoing_webhook.jsx | 131
-rw-r--r--  webapp/components/integrations/components/add_outgoing_webhook.jsx | 36
-rw-r--r--  webapp/components/integrations/components/add_outgoing_webhook/add_outgoing_webhook.jsx | 69
-rw-r--r--  webapp/components/integrations/components/add_outgoing_webhook/index.js | 25
-rw-r--r--  webapp/components/integrations/components/edit_outgoing_webhook.jsx | 183
-rw-r--r--  webapp/components/integrations/components/edit_outgoing_webhook/edit_outgoing_webhook.jsx | 169
-rw-r--r--  webapp/components/integrations/components/edit_outgoing_webhook/index.js | 30
-rw-r--r--  webapp/components/post_view/post_attachment.jsx | 9
-rw-r--r--  webapp/components/post_view/post_body_additional_content.jsx | 14
-rw-r--r--  webapp/components/setting_picture.jsx | 72
-rw-r--r--  webapp/components/sidebar.jsx | 6
-rw-r--r--  webapp/components/signup/components/signup_email.jsx | 39
-rw-r--r--  webapp/components/textbox.jsx | 4
-rw-r--r--  webapp/components/webrtc/components/webrtc_sidebar.jsx | 4
-rwxr-xr-x  webapp/i18n/en.json | 21
-rw-r--r--  webapp/package.json | 1
-rw-r--r--  webapp/routes/route_admin_console.jsx | 5
-rw-r--r--  webapp/routes/route_integrations.jsx | 4
-rw-r--r--  webapp/tests/components/integrations/__snapshots__/add_outgoing_hook.test.jsx.snap | 27
-rw-r--r--  webapp/tests/components/integrations/__snapshots__/edit_outgoing_hook.test.jsx.snap | 7
-rw-r--r--  webapp/tests/components/integrations/add_outgoing_hook.test.jsx | 29
-rw-r--r--  webapp/tests/components/integrations/edit_outgoing_hook.test.jsx | 31
-rw-r--r--  webapp/utils/utils.jsx | 4
-rw-r--r--  webapp/yarn.lock | 4
89 files changed, 3014 insertions(+), 1100 deletions(-)
diff --git a/Makefile b/Makefile
index 5a72f5ceb..9407e2ea0 100644
--- a/Makefile
+++ b/Makefile
@@ -123,6 +123,14 @@ ifeq ($(BUILD_ENTERPRISE_READY),true)
docker start mattermost-openldap > /dev/null; \
sleep 10; \
fi
+
+ @if [ $(shell docker ps -a | grep -ci mattermost-elasticsearch) -eq 0 ]; then \
+ echo starting mattermost-elasticsearch; \
+ docker run --name mattermost-elasticsearch -p 9200:9200 -e "http.host=0.0.0.0" -e "transport.host=127.0.0.1" -e "ES_JAVA_OPTS=-Xms250m -Xmx250m" -d grundleborg/elasticsearch:latest > /dev/null; \
+ elif [ $(shell docker ps | grep -ci mattermost-elasticsearch) -eq 0 ]; then \
+ echo restarting mattermost-elasticsearch; \
+ docker start mattermost-elasticsearch > /dev/null; \
+ fi
endif
stop-docker:
@@ -148,6 +156,11 @@ stop-docker:
docker stop mattermost-inbucket > /dev/null; \
fi
+ @if [ $(shell docker ps -a | grep -ci mattermost-elasticsearch) -eq 1 ]; then \
+ echo stopping mattermost-elasticsearch; \
+ docker stop mattermost-elasticsearch > /dev/null; \
+ fi
+
clean-docker:
@echo Removing docker containers
@@ -175,6 +188,12 @@ clean-docker:
docker rm -v mattermost-inbucket > /dev/null; \
fi
+ @if [ $(shell docker ps -a | grep -ci mattermost-elasticsearch) -eq 1 ]; then \
+ echo removing mattermost-elasticsearch; \
+ docker stop mattermost-elasticsearch > /dev/null; \
+ docker rm -v mattermost-elasticsearch > /dev/null; \
+ fi
+
check-client-style:
@echo Checking client style
diff --git a/api/channel.go b/api/channel.go
index c7b0630e6..2a56e7c93 100644
--- a/api/channel.go
+++ b/api/channel.go
@@ -602,16 +602,15 @@ func getMyChannelMembers(c *Context, w http.ResponseWriter, r *http.Request) {
func getPinnedPosts(c *Context, w http.ResponseWriter, r *http.Request) {
params := mux.Vars(r)
channelId := params["channel_id"]
- posts := &model.PostList{}
if result := <-app.Srv.Store.Channel().GetPinnedPosts(channelId); result.Err != nil {
c.Err = result.Err
return
} else {
- posts = result.Data.(*model.PostList)
+ posts := result.Data.(*model.PostList)
+ w.Write([]byte(posts.ToJson()))
}
- w.Write([]byte(posts.ToJson()))
}
func addMember(c *Context, w http.ResponseWriter, r *http.Request) {
diff --git a/api/post_test.go b/api/post_test.go
index dea7afa39..c4e016b0b 100644
--- a/api/post_test.go
+++ b/api/post_test.go
@@ -177,8 +177,9 @@ func TestCreatePostWithCreateAt(t *testing.T) {
func testCreatePostWithOutgoingHook(
t *testing.T,
- hookContentType string,
- expectedContentType string,
+ hookContentType, expectedContentType, message, triggerWord string,
+ fileIds []string,
+ triggerWhen int,
) {
th := Setup().InitSystemAdmin()
Client := th.SystemAdminClient
@@ -221,7 +222,8 @@ func testCreatePostWithOutgoingHook(
UserName: user.Username,
PostId: post.Id,
Text: post.Message,
- TriggerWord: strings.Fields(post.Message)[0],
+ TriggerWord: triggerWord,
+ FileIds: strings.Join(post.FileIds, ","),
}
// depending on the Content-Type, we expect to find a JSON or form encoded payload
@@ -256,11 +258,17 @@ func testCreatePostWithOutgoingHook(
defer ts.Close()
// create an outgoing webhook, passing it the test server URL
- triggerWord := "bingo"
+ var triggerWords []string
+ if triggerWord != "" {
+ triggerWords = []string{triggerWord}
+ }
+
hook = &model.OutgoingWebhook{
ChannelId: channel.Id,
+ TeamId: team.Id,
ContentType: hookContentType,
- TriggerWords: []string{triggerWord},
+ TriggerWords: triggerWords,
+ TriggerWhen: triggerWhen,
CallbackURLs: []string{ts.URL},
}
@@ -271,10 +279,10 @@ func testCreatePostWithOutgoingHook(
}
// create a post to trigger the webhook
- message := triggerWord + " lorem ipusm"
post = &model.Post{
ChannelId: channel.Id,
Message: message,
+ FileIds: fileIds,
}
if result, err := Client.CreatePost(post); err != nil {
@@ -290,25 +298,34 @@ func testCreatePostWithOutgoingHook(
select {
case ok := <-success:
if !ok {
- t.Fatal("Test server was sent an invalid webhook.")
+ t.Fatal("Test server did send an invalid webhook.")
}
case <-time.After(time.Second):
- t.Fatal("Timeout, test server wasn't sent the webhook.")
+ t.Fatal("Timeout, test server did not send the webhook.")
}
}
func TestCreatePostWithOutgoingHook_form_urlencoded(t *testing.T) {
- testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded")
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "", "", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "", "", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
}
func TestCreatePostWithOutgoingHook_json(t *testing.T) {
- testCreatePostWithOutgoingHook(t, "application/json", "application/json")
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerword lorem ipsum", "triggerword", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerword lorem ipsum", "", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerwordaaazzz lorem ipsum", "", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
}
// hooks created before we added the ContentType field should be considered as
// application/x-www-form-urlencoded
func TestCreatePostWithOutgoingHook_no_content_type(t *testing.T) {
- testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded")
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_STARTS_WITH)
}
func TestUpdatePost(t *testing.T) {
diff --git a/api4/elasticsearch.go b/api4/elasticsearch.go
index 05ef1f539..9eafec48b 100644
--- a/api4/elasticsearch.go
+++ b/api4/elasticsearch.go
@@ -19,12 +19,17 @@ func InitElasticsearch() {
}
func testElasticsearch(c *Context, w http.ResponseWriter, r *http.Request) {
+ cfg := model.ConfigFromJson(r.Body)
+ if cfg == nil {
+ cfg = utils.Cfg
+ }
+
if !app.SessionHasPermissionTo(c.Session, model.PERMISSION_MANAGE_SYSTEM) {
c.SetPermissionError(model.PERMISSION_MANAGE_SYSTEM)
return
}
- if err := app.TestElasticsearch(); err != nil {
+ if err := app.TestElasticsearch(cfg); err != nil {
c.Err = err
return
}
diff --git a/api4/job.go b/api4/job.go
index 8610d9e74..e6c17c42d 100644
--- a/api4/job.go
+++ b/api4/job.go
@@ -14,11 +14,11 @@ import (
func InitJob() {
l4g.Info("Initializing job API routes")
- BaseRoutes.Jobs.Handle("/type/{job_type:[A-Za-z0-9_-]+}/statuses", ApiSessionRequired(getJobStatusesByType)).Methods("GET")
- BaseRoutes.Jobs.Handle("/{job_id:[A-Za-z0-9]+}/status", ApiSessionRequired(getJobStatus)).Methods("GET")
+ BaseRoutes.Jobs.Handle("/type/{job_type:[A-Za-z0-9_-]+}/statuses", ApiSessionRequired(getJobsByType)).Methods("GET")
+ BaseRoutes.Jobs.Handle("/{job_id:[A-Za-z0-9]+}/status", ApiSessionRequired(getJob)).Methods("GET")
}
-func getJobStatus(c *Context, w http.ResponseWriter, r *http.Request) {
+func getJob(c *Context, w http.ResponseWriter, r *http.Request) {
c.RequireJobId()
if c.Err != nil {
return
@@ -29,7 +29,7 @@ func getJobStatus(c *Context, w http.ResponseWriter, r *http.Request) {
return
}
- if status, err := app.GetJobStatus(c.Params.JobId); err != nil {
+ if status, err := app.GetJob(c.Params.JobId); err != nil {
c.Err = err
return
} else {
@@ -37,7 +37,7 @@ func getJobStatus(c *Context, w http.ResponseWriter, r *http.Request) {
}
}
-func getJobStatusesByType(c *Context, w http.ResponseWriter, r *http.Request) {
+func getJobsByType(c *Context, w http.ResponseWriter, r *http.Request) {
c.RequireJobType()
if c.Err != nil {
return
@@ -48,10 +48,10 @@ func getJobStatusesByType(c *Context, w http.ResponseWriter, r *http.Request) {
return
}
- if statuses, err := app.GetJobStatusesByTypePage(c.Params.JobType, c.Params.Page, c.Params.PerPage); err != nil {
+ if statuses, err := app.GetJobsByTypePage(c.Params.JobType, c.Params.Page, c.Params.PerPage); err != nil {
c.Err = err
return
} else {
- w.Write([]byte(model.JobStatusesToJson(statuses)))
+ w.Write([]byte(model.JobsToJson(statuses)))
}
}
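
The two handlers above were renamed from the old job-status endpoints (getJobStatus/getJobStatusesByType) to getJob/getJobsByType, and api4/job_test.go below exercises the matching Client4 methods. A minimal sketch of driving them from Go; the NewAPIv4Client constructor and Login helper come from model/client4.go, the server URL and credentials are placeholders, and the method signatures are inferred from the tests rather than shown in this hunk.

    package main

    import (
        "fmt"

        "github.com/mattermost/platform/model"
    )

    func main() {
        // The caller must be a system admin because both handlers check PERMISSION_MANAGE_SYSTEM.
        client := model.NewAPIv4Client("http://localhost:8065")
        client.Login("admin@example.com", "password") // error handling elided

        // List the most recent jobs of one type (pages are zero-based).
        jobs, resp := client.GetJobsByType(model.JOB_TYPE_DATA_RETENTION, 0, 10)
        if resp.Error != nil {
            fmt.Println("listing jobs failed:", resp.Error.Error())
            return
        }

        // Fetch each job individually through the renamed getJob handler.
        for _, j := range jobs {
            if job, resp := client.GetJob(j.Id); resp.Error == nil {
                fmt.Printf("job %s: status=%s progress=%d\n", job.Id, job.Status, job.Progress)
            }
        }
    }
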
diff --git a/api4/job_test.go b/api4/job_test.go
index 0f39fc306..8bbea83e1 100644
--- a/api4/job_test.go
+++ b/api4/job_test.go
@@ -16,30 +16,30 @@ func TestGetJobStatus(t *testing.T) {
th := Setup().InitBasic().InitSystemAdmin()
defer TearDown()
- status := &model.JobStatus{
+ status := &model.Job{
Id: model.NewId(),
Status: model.NewId(),
}
- if result := <-app.Srv.Store.JobStatus().SaveOrUpdate(status); result.Err != nil {
+ if result := <-app.Srv.Store.Job().Save(status); result.Err != nil {
t.Fatal(result.Err)
}
- defer app.Srv.Store.JobStatus().Delete(status.Id)
+ defer app.Srv.Store.Job().Delete(status.Id)
- received, resp := th.SystemAdminClient.GetJobStatus(status.Id)
+ received, resp := th.SystemAdminClient.GetJob(status.Id)
CheckNoError(t, resp)
if received.Id != status.Id || received.Status != status.Status {
t.Fatal("incorrect job status received")
}
- _, resp = th.SystemAdminClient.GetJobStatus("1234")
+ _, resp = th.SystemAdminClient.GetJob("1234")
CheckBadRequestStatus(t, resp)
- _, resp = th.Client.GetJobStatus(status.Id)
+ _, resp = th.Client.GetJob(status.Id)
CheckForbiddenStatus(t, resp)
- _, resp = th.SystemAdminClient.GetJobStatus(model.NewId())
+ _, resp = th.SystemAdminClient.GetJob(model.NewId())
CheckNotFoundStatus(t, resp)
}
@@ -49,7 +49,7 @@ func TestGetJobStatusesByType(t *testing.T) {
jobType := model.NewId()
- statuses := []*model.JobStatus{
+ statuses := []*model.Job{
{
Id: model.NewId(),
Type: jobType,
@@ -68,11 +68,11 @@ func TestGetJobStatusesByType(t *testing.T) {
}
for _, status := range statuses {
- store.Must(app.Srv.Store.JobStatus().SaveOrUpdate(status))
- defer app.Srv.Store.JobStatus().Delete(status.Id)
+ store.Must(app.Srv.Store.Job().Save(status))
+ defer app.Srv.Store.Job().Delete(status.Id)
}
- received, resp := th.SystemAdminClient.GetJobStatusesByType(jobType, 0, 2)
+ received, resp := th.SystemAdminClient.GetJobsByType(jobType, 0, 2)
CheckNoError(t, resp)
if len(received) != 2 {
@@ -83,7 +83,7 @@ func TestGetJobStatusesByType(t *testing.T) {
t.Fatal("should've received second newest job second")
}
- received, resp = th.SystemAdminClient.GetJobStatusesByType(jobType, 1, 2)
+ received, resp = th.SystemAdminClient.GetJobsByType(jobType, 1, 2)
CheckNoError(t, resp)
if len(received) != 1 {
@@ -92,12 +92,12 @@ func TestGetJobStatusesByType(t *testing.T) {
t.Fatal("should've received oldest job last")
}
- _, resp = th.SystemAdminClient.GetJobStatusesByType("", 0, 60)
+ _, resp = th.SystemAdminClient.GetJobsByType("", 0, 60)
CheckNotFoundStatus(t, resp)
- _, resp = th.SystemAdminClient.GetJobStatusesByType(strings.Repeat("a", 33), 0, 60)
+ _, resp = th.SystemAdminClient.GetJobsByType(strings.Repeat("a", 33), 0, 60)
CheckBadRequestStatus(t, resp)
- _, resp = th.Client.GetJobStatusesByType(jobType, 0, 60)
+ _, resp = th.Client.GetJobsByType(jobType, 0, 60)
CheckForbiddenStatus(t, resp)
}
diff --git a/api4/post_test.go b/api4/post_test.go
index a2c0b065b..d554ca472 100644
--- a/api4/post_test.go
+++ b/api4/post_test.go
@@ -4,9 +4,13 @@
package api4
import (
+ "encoding/json"
"net/http"
+ "net/http/httptest"
+ "net/url"
"reflect"
"strconv"
+ "strings"
"testing"
"time"
@@ -101,6 +105,158 @@ func TestCreatePost(t *testing.T) {
}
}
+func testCreatePostWithOutgoingHook(
+ t *testing.T,
+ hookContentType, expectedContentType, message, triggerWord string,
+ fileIds []string,
+ triggerWhen int,
+) {
+ th := Setup().InitBasic().InitSystemAdmin()
+ defer TearDown()
+ user := th.SystemAdminUser
+ team := th.BasicTeam
+ channel := th.BasicChannel
+
+ enableOutgoingHooks := utils.Cfg.ServiceSettings.EnableOutgoingWebhooks
+ enableAdminOnlyHooks := utils.Cfg.ServiceSettings.EnableOnlyAdminIntegrations
+ defer func() {
+ utils.Cfg.ServiceSettings.EnableOutgoingWebhooks = enableOutgoingHooks
+ utils.Cfg.ServiceSettings.EnableOnlyAdminIntegrations = enableAdminOnlyHooks
+ utils.SetDefaultRolesBasedOnConfig()
+ }()
+ utils.Cfg.ServiceSettings.EnableOutgoingWebhooks = true
+ *utils.Cfg.ServiceSettings.EnableOnlyAdminIntegrations = true
+ utils.SetDefaultRolesBasedOnConfig()
+
+ var hook *model.OutgoingWebhook
+ var post *model.Post
+
+ // Create a test server that is the target of the outgoing webhook. It will
+ // validate the webhook body fields and write to the success channel on
+ // success/failure.
+ success := make(chan bool)
+ wait := make(chan bool, 1)
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ <-wait
+
+ requestContentType := r.Header.Get("Content-Type")
+ if requestContentType != expectedContentType {
+ t.Logf("Content-Type is %s, should be %s", requestContentType, expectedContentType)
+ success <- false
+ return
+ }
+
+ expectedPayload := &model.OutgoingWebhookPayload{
+ Token: hook.Token,
+ TeamId: hook.TeamId,
+ TeamDomain: team.Name,
+ ChannelId: post.ChannelId,
+ ChannelName: channel.Name,
+ Timestamp: post.CreateAt,
+ UserId: post.UserId,
+ UserName: user.Username,
+ PostId: post.Id,
+ Text: post.Message,
+ TriggerWord: triggerWord,
+ FileIds: strings.Join(post.FileIds, ","),
+ }
+
+ // depending on the Content-Type, we expect to find a JSON or form encoded payload
+ if requestContentType == "application/json" {
+ decoder := json.NewDecoder(r.Body)
+ o := &model.OutgoingWebhookPayload{}
+ decoder.Decode(&o)
+
+ if !reflect.DeepEqual(expectedPayload, o) {
+ t.Logf("JSON payload is %+v, should be %+v", o, expectedPayload)
+ success <- false
+ return
+ }
+ } else {
+ err := r.ParseForm()
+ if err != nil {
+ t.Logf("Error parsing form: %q", err)
+ success <- false
+ return
+ }
+
+ expectedFormValues, _ := url.ParseQuery(expectedPayload.ToFormValues())
+ if !reflect.DeepEqual(expectedFormValues, r.Form) {
+ t.Logf("Form values are: %q\n, should be: %q\n", r.Form, expectedFormValues)
+ success <- false
+ return
+ }
+ }
+
+ success <- true
+ }))
+ defer ts.Close()
+
+ // create an outgoing webhook, passing it the test server URL
+ var triggerWords []string
+ if triggerWord != "" {
+ triggerWords = []string{triggerWord}
+ }
+
+ hook = &model.OutgoingWebhook{
+ ChannelId: channel.Id,
+ TeamId: team.Id,
+ ContentType: hookContentType,
+ TriggerWords: triggerWords,
+ TriggerWhen: triggerWhen,
+ CallbackURLs: []string{ts.URL},
+ }
+
+ hook, resp := th.SystemAdminClient.CreateOutgoingWebhook(hook)
+ CheckNoError(t, resp)
+
+ // create a post to trigger the webhook
+ post = &model.Post{
+ ChannelId: channel.Id,
+ Message: message,
+ FileIds: fileIds,
+ }
+
+ post, resp = th.SystemAdminClient.CreatePost(post)
+ CheckNoError(t, resp)
+
+ wait <- true
+
+ // We wait for the test server to write to the success channel and we make
+ // the test fail if that doesn't happen before the timeout.
+ select {
+ case ok := <-success:
+ if !ok {
+ t.Fatal("Test server did send an invalid webhook.")
+ }
+ case <-time.After(time.Second):
+ t.Fatal("Timeout, test server did not send the webhook.")
+ }
+}
+
+func TestCreatePostWithOutgoingHook_form_urlencoded(t *testing.T) {
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "", "", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/x-www-form-urlencoded", "application/x-www-form-urlencoded", "", "", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+}
+
+func TestCreatePostWithOutgoingHook_json(t *testing.T) {
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerword lorem ipsum", "triggerword", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerword lorem ipsum", "", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "application/json", "application/json", "triggerwordaaazzz lorem ipsum", "", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+}
+
+// hooks created before we added the ContentType field should be considered as
+// application/x-www-form-urlencoded
+func TestCreatePostWithOutgoingHook_no_content_type(t *testing.T) {
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "triggerword", []string{"file_id_1"}, app.TRIGGERWORDS_STARTS_WITH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerword lorem ipsum", "", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_EXACT_MATCH)
+ testCreatePostWithOutgoingHook(t, "", "application/x-www-form-urlencoded", "triggerwordaaazzz lorem ipsum", "", []string{"file_id_1, file_id_2"}, app.TRIGGERWORDS_STARTS_WITH)
+}
+
func TestUpdatePost(t *testing.T) {
th := Setup().InitBasic().InitSystemAdmin()
defer TearDown()
diff --git a/app/admin.go b/app/admin.go
index 8b7d64b53..6fbe150c4 100644
--- a/app/admin.go
+++ b/app/admin.go
@@ -16,6 +16,7 @@ import (
"github.com/mattermost/platform/model"
"github.com/mattermost/platform/store"
"github.com/mattermost/platform/utils"
+ "github.com/mattermost/platform/jobs"
)
func GetLogs(page, perPage int) ([]string, *model.AppError) {
@@ -187,6 +188,8 @@ func RecycleDatabaseConnection() {
l4g.Warn(utils.T("api.admin.recycle_db_start.warn"))
Srv.Store = store.NewLayeredStore()
+ jobs.Srv.Store = Srv.Store
+
time.Sleep(20 * time.Second)
oldStore.Close()
diff --git a/app/elasticsearch.go b/app/elasticsearch.go
index c021b15e8..ef34a6074 100644
--- a/app/elasticsearch.go
+++ b/app/elasticsearch.go
@@ -8,11 +8,20 @@ import (
"github.com/mattermost/platform/model"
"github.com/mattermost/platform/einterfaces"
+ "github.com/mattermost/platform/utils"
)
-func TestElasticsearch() *model.AppError {
- if esI := einterfaces.GetElasticSearchInterface(); esI != nil {
- if err := esI.TestConfig(); err != nil {
+func TestElasticsearch(cfg *model.Config) *model.AppError {
+ if *cfg.ElasticSearchSettings.Password == model.FAKE_SETTING {
+ if *cfg.ElasticSearchSettings.ConnectionUrl == *utils.Cfg.ElasticSearchSettings.ConnectionUrl && *cfg.ElasticSearchSettings.Username == *utils.Cfg.ElasticSearchSettings.Username {
+ *cfg.ElasticSearchSettings.Password = *utils.Cfg.ElasticSearchSettings.Password
+ } else {
+ return model.NewAppError("TestElasticsearch", "ent.elasticsearch.test_config.reenter_password", nil, "", http.StatusBadRequest)
+ }
+ }
+
+ if esI := einterfaces.GetElasticsearchInterface(); esI != nil {
+ if err := esI.TestConfig(cfg); err != nil {
return err
}
} else {
diff --git a/app/file.go b/app/file.go
index d21fd4a14..a4e112e98 100644
--- a/app/file.go
+++ b/app/file.go
@@ -480,15 +480,24 @@ func DoUploadFile(teamId string, channelId string, userId string, rawFilename st
}
func HandleImages(previewPathList []string, thumbnailPathList []string, fileData [][]byte) {
- for i, data := range fileData {
- go func(i int, data []byte) {
- img, width, height := prepareImage(fileData[i])
- if img != nil {
- go generateThumbnailImage(*img, thumbnailPathList[i], width, height)
- go generatePreviewImage(*img, previewPathList[i], width)
- }
- }(i, data)
+ wg := new(sync.WaitGroup)
+
+ for i := range fileData {
+ img, width, height := prepareImage(fileData[i])
+ if img != nil {
+ wg.Add(2)
+ go func(img *image.Image, path string, width int, height int) {
+ defer wg.Done()
+ generateThumbnailImage(*img, path, width, height)
+ }(img, thumbnailPathList[i], width, height)
+
+ go func(img *image.Image, path string, width int) {
+ defer wg.Done()
+ generatePreviewImage(*img, path, width)
+ }(img, previewPathList[i], width)
+ }
}
+ wg.Wait()
}
func prepareImage(fileData []byte) (*image.Image, int, int) {
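
HandleImages now waits on a sync.WaitGroup, so its caller can rely on every thumbnail and preview having been written instead of the previous fire-and-forget goroutines. A standalone sketch of the same fan-out/wait pattern with placeholder work (not the actual image helpers):

    package main

    import (
        "fmt"
        "sync"
    )

    func main() {
        paths := []string{"a.png", "b.png", "c.png"}

        wg := new(sync.WaitGroup)
        for i := range paths {
            wg.Add(2) // two independent tasks per file, mirroring thumbnail + preview

            go func(path string) {
                defer wg.Done()
                fmt.Println("thumbnail:", path) // stands in for generateThumbnailImage
            }(paths[i])

            go func(path string) {
                defer wg.Done()
                fmt.Println("preview:", path) // stands in for generatePreviewImage
            }(paths[i])
        }

        // With the WaitGroup, reaching this point means all work has finished.
        wg.Wait()
    }
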
diff --git a/app/job.go b/app/job.go
index 00439e4d2..c625ce15f 100644
--- a/app/job.go
+++ b/app/job.go
@@ -7,22 +7,22 @@ import (
"github.com/mattermost/platform/model"
)
-func GetJobStatus(id string) (*model.JobStatus, *model.AppError) {
- if result := <-Srv.Store.JobStatus().Get(id); result.Err != nil {
+func GetJob(id string) (*model.Job, *model.AppError) {
+ if result := <-Srv.Store.Job().Get(id); result.Err != nil {
return nil, result.Err
} else {
- return result.Data.(*model.JobStatus), nil
+ return result.Data.(*model.Job), nil
}
}
-func GetJobStatusesByTypePage(jobType string, page int, perPage int) ([]*model.JobStatus, *model.AppError) {
- return GetJobStatusesByType(jobType, page*perPage, perPage)
+func GetJobsByTypePage(jobType string, page int, perPage int) ([]*model.Job, *model.AppError) {
+ return GetJobsByType(jobType, page*perPage, perPage)
}
-func GetJobStatusesByType(jobType string, offset int, limit int) ([]*model.JobStatus, *model.AppError) {
- if result := <-Srv.Store.JobStatus().GetAllByTypePage(jobType, offset, limit); result.Err != nil {
+func GetJobsByType(jobType string, offset int, limit int) ([]*model.Job, *model.AppError) {
+ if result := <-Srv.Store.Job().GetAllByTypePage(jobType, offset, limit); result.Err != nil {
return nil, result.Err
} else {
- return result.Data.([]*model.JobStatus), nil
+ return result.Data.([]*model.Job), nil
}
}
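
GetJobsByTypePage is a thin wrapper that turns a zero-based page number into a row offset (page*perPage) before delegating to GetJobsByType. A tiny illustrative sketch of that arithmetic (the function name here is not part of the patch):

    package main

    import "fmt"

    // pageToOffset mirrors GetJobsByTypePage: page 0 starts at offset 0,
    // page 1 at offset perPage, and so on.
    func pageToOffset(page, perPage int) (offset, limit int) {
        return page * perPage, perPage
    }

    func main() {
        offset, limit := pageToOffset(1, 2)
        fmt.Println(offset, limit) // 2 2 — matches the GetJobsByType(jobType, 2, 2) call in app/job_test.go
    }
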
diff --git a/app/job_test.go b/app/job_test.go
index 20e9dee8a..ced65788f 100644
--- a/app/job_test.go
+++ b/app/job_test.go
@@ -13,17 +13,17 @@ import (
func TestGetJobStatus(t *testing.T) {
Setup()
- status := &model.JobStatus{
+ status := &model.Job{
Id: model.NewId(),
Status: model.NewId(),
}
- if result := <-Srv.Store.JobStatus().SaveOrUpdate(status); result.Err != nil {
+ if result := <-Srv.Store.Job().Save(status); result.Err != nil {
t.Fatal(result.Err)
}
- defer Srv.Store.JobStatus().Delete(status.Id)
+ defer Srv.Store.Job().Delete(status.Id)
- if received, err := GetJobStatus(status.Id); err != nil {
+ if received, err := GetJob(status.Id); err != nil {
t.Fatal(err)
} else if received.Id != status.Id || received.Status != status.Status {
t.Fatal("inccorrect job status received")
@@ -35,7 +35,7 @@ func TestGetJobStatusesByType(t *testing.T) {
jobType := model.NewId()
- statuses := []*model.JobStatus{
+ statuses := []*model.Job{
{
Id: model.NewId(),
Type: jobType,
@@ -54,11 +54,11 @@ func TestGetJobStatusesByType(t *testing.T) {
}
for _, status := range statuses {
- store.Must(Srv.Store.JobStatus().SaveOrUpdate(status))
- defer Srv.Store.JobStatus().Delete(status.Id)
+ store.Must(Srv.Store.Job().Save(status))
+ defer Srv.Store.Job().Delete(status.Id)
}
- if received, err := GetJobStatusesByType(jobType, 0, 2); err != nil {
+ if received, err := GetJobsByType(jobType, 0, 2); err != nil {
t.Fatal(err)
} else if len(received) != 2 {
t.Fatal("received wrong number of statuses")
@@ -68,7 +68,7 @@ func TestGetJobStatusesByType(t *testing.T) {
t.Fatal("should've received second newest job second")
}
- if received, err := GetJobStatusesByType(jobType, 2, 2); err != nil {
+ if received, err := GetJobsByType(jobType, 2, 2); err != nil {
t.Fatal(err)
} else if len(received) != 1 {
t.Fatal("received wrong number of statuses")
diff --git a/app/post.go b/app/post.go
index dd0b24bac..7ad072943 100644
--- a/app/post.go
+++ b/app/post.go
@@ -99,7 +99,7 @@ func CreatePost(post *model.Post, teamId string, triggerWebhooks bool) (*model.P
rpost = result.Data.(*model.Post)
}
- esInterface := einterfaces.GetElasticSearchInterface()
+ esInterface := einterfaces.GetElasticsearchInterface()
if esInterface != nil && *utils.Cfg.ElasticSearchSettings.EnableIndexing {
go esInterface.IndexPost(rpost, teamId)
}
@@ -284,7 +284,7 @@ func UpdatePost(post *model.Post, safeUpdate bool) (*model.Post, *model.AppError
} else {
rpost := result.Data.(*model.Post)
- esInterface := einterfaces.GetElasticSearchInterface()
+ esInterface := einterfaces.GetElasticsearchInterface()
if esInterface != nil && *utils.Cfg.ElasticSearchSettings.EnableIndexing {
go func() {
if rchannel := <-Srv.Store.Channel().GetForPost(rpost.Id); rchannel.Err != nil {
@@ -471,7 +471,7 @@ func DeletePost(postId string) (*model.Post, *model.AppError) {
go DeletePostFiles(post)
go DeleteFlaggedPosts(post.Id)
- esInterface := einterfaces.GetElasticSearchInterface()
+ esInterface := einterfaces.GetElasticsearchInterface()
if esInterface != nil && *utils.Cfg.ElasticSearchSettings.EnableIndexing {
go esInterface.DeletePost(post.Id)
}
@@ -502,8 +502,8 @@ func DeletePostFiles(post *model.Post) {
func SearchPostsInTeam(terms string, userId string, teamId string, isOrSearch bool) (*model.PostList, *model.AppError) {
paramsList := model.ParseSearchParams(terms)
- esInterface := einterfaces.GetElasticSearchInterface()
- if esInterface != nil && *utils.Cfg.ElasticSearchSettings.EnableSearching && utils.IsLicensed && *utils.License.Features.ElasticSearch {
+ esInterface := einterfaces.GetElasticsearchInterface()
+ if esInterface != nil && *utils.Cfg.ElasticSearchSettings.EnableSearching && utils.IsLicensed && *utils.License.Features.Elasticsearch {
finalParamsList := []*model.SearchParams{}
for _, params := range paramsList {
@@ -539,7 +539,7 @@ func SearchPostsInTeam(terms string, userId string, teamId string, isOrSearch bo
return nil, err
}
- postIds, err := einterfaces.GetElasticSearchInterface().SearchPosts(userChannels, finalParamsList)
+ postIds, err := einterfaces.GetElasticsearchInterface().SearchPosts(userChannels, finalParamsList)
if err != nil {
return nil, err
}
diff --git a/app/webhook.go b/app/webhook.go
index 6a7bb16e1..e92805608 100644
--- a/app/webhook.go
+++ b/app/webhook.go
@@ -18,8 +18,8 @@ import (
)
const (
- TRIGGERWORDS_FULL = 0
- TRIGGERWORDS_STARTSWITH = 1
+ TRIGGERWORDS_EXACT_MATCH = 0
+ TRIGGERWORDS_STARTS_WITH = 1
)
func handleWebhookEvents(post *model.Post, team *model.Team, channel *model.Channel, user *model.User) *model.AppError {
@@ -42,74 +42,80 @@ func handleWebhookEvents(post *model.Post, team *model.Team, channel *model.Chan
return nil
}
+ var firstWord, triggerWord string
+
splitWords := strings.Fields(post.Message)
- if len(splitWords) == 0 {
- return nil
+ if len(splitWords) > 0 {
+ firstWord = splitWords[0]
}
- firstWord := splitWords[0]
relevantHooks := []*model.OutgoingWebhook{}
for _, hook := range hooks {
if hook.ChannelId == post.ChannelId || len(hook.ChannelId) == 0 {
if hook.ChannelId == post.ChannelId && len(hook.TriggerWords) == 0 {
relevantHooks = append(relevantHooks, hook)
- } else if hook.TriggerWhen == TRIGGERWORDS_FULL && hook.HasTriggerWord(firstWord) {
+ triggerWord = ""
+ } else if hook.TriggerWhen == TRIGGERWORDS_EXACT_MATCH && hook.TriggerWordExactMatch(firstWord) {
relevantHooks = append(relevantHooks, hook)
- } else if hook.TriggerWhen == TRIGGERWORDS_STARTSWITH && hook.TriggerWordStartsWith(firstWord) {
+ triggerWord = hook.GetTriggerWord(firstWord, true)
+ } else if hook.TriggerWhen == TRIGGERWORDS_STARTS_WITH && hook.TriggerWordStartsWith(firstWord) {
relevantHooks = append(relevantHooks, hook)
+ triggerWord = hook.GetTriggerWord(firstWord, false)
}
}
}
for _, hook := range relevantHooks {
- go func(hook *model.OutgoingWebhook) {
- payload := &model.OutgoingWebhookPayload{
- Token: hook.Token,
- TeamId: hook.TeamId,
- TeamDomain: team.Name,
- ChannelId: post.ChannelId,
- ChannelName: channel.Name,
- Timestamp: post.CreateAt,
- UserId: post.UserId,
- UserName: user.Username,
- PostId: post.Id,
- Text: post.Message,
- TriggerWord: firstWord,
- }
- var body io.Reader
- var contentType string
- if hook.ContentType == "application/json" {
- body = strings.NewReader(payload.ToJSON())
- contentType = "application/json"
+ payload := &model.OutgoingWebhookPayload{
+ Token: hook.Token,
+ TeamId: hook.TeamId,
+ TeamDomain: team.Name,
+ ChannelId: post.ChannelId,
+ ChannelName: channel.Name,
+ Timestamp: post.CreateAt,
+ UserId: post.UserId,
+ UserName: user.Username,
+ PostId: post.Id,
+ Text: post.Message,
+ TriggerWord: triggerWord,
+ FileIds: strings.Join(post.FileIds, ","),
+ }
+ go TriggerWebhook(payload, hook, post)
+ }
+
+ return nil
+}
+
+func TriggerWebhook(payload *model.OutgoingWebhookPayload, hook *model.OutgoingWebhook, post *model.Post) {
+ var body io.Reader
+ var contentType string
+ if hook.ContentType == "application/json" {
+ body = strings.NewReader(payload.ToJSON())
+ contentType = "application/json"
+ } else {
+ body = strings.NewReader(payload.ToFormValues())
+ contentType = "application/x-www-form-urlencoded"
+ }
+
+ for _, url := range hook.CallbackURLs {
+ go func(url string) {
+ req, _ := http.NewRequest("POST", url, body)
+ req.Header.Set("Content-Type", contentType)
+ req.Header.Set("Accept", "application/json")
+ if resp, err := utils.HttpClient().Do(req); err != nil {
+ l4g.Error(utils.T("api.post.handle_webhook_events_and_forget.event_post.error"), err.Error())
} else {
- body = strings.NewReader(payload.ToFormValues())
- contentType = "application/x-www-form-urlencoded"
- }
+ defer CloseBody(resp)
+ respProps := model.MapFromJson(resp.Body)
- for _, url := range hook.CallbackURLs {
- go func(url string) {
- req, _ := http.NewRequest("POST", url, body)
- req.Header.Set("Content-Type", contentType)
- req.Header.Set("Accept", "application/json")
- if resp, err := utils.HttpClient().Do(req); err != nil {
- l4g.Error(utils.T("api.post.handle_webhook_events_and_forget.event_post.error"), err.Error())
- } else {
- defer CloseBody(resp)
- respProps := model.MapFromJson(resp.Body)
-
- if text, ok := respProps["text"]; ok {
- if _, err := CreateWebhookPost(hook.CreatorId, hook.TeamId, post.ChannelId, text, respProps["username"], respProps["icon_url"], post.Props, post.Type); err != nil {
- l4g.Error(utils.T("api.post.handle_webhook_events_and_forget.create_post.error"), err)
- }
- }
+ if text, ok := respProps["text"]; ok {
+ if _, err := CreateWebhookPost(hook.CreatorId, hook.TeamId, post.ChannelId, text, respProps["username"], respProps["icon_url"], post.Props, post.Type); err != nil {
+ l4g.Error(utils.T("api.post.handle_webhook_events_and_forget.create_post.error"), err)
}
- }(url)
+ }
}
-
- }(hook)
+ }(url)
}
-
- return nil
}
func CreateWebhookPost(userId, teamId, channelId, text, overrideUsername, overrideIconUrl string, props model.StringInterface, postType string) (*model.Post, *model.AppError) {
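
TriggerWebhook POSTs the payload to each callback URL and, if the JSON response contains a "text" field, creates a reply post via CreateWebhookPost (optionally honoring "username" and "icon_url"). A minimal sketch of an external endpoint that consumes the form-encoded payload and echoes a reply; the form key names read here are assumptions, only the response contract is visible in the hunk above.

    package main

    import (
        "encoding/json"
        "log"
        "net/http"
    )

    func main() {
        http.HandleFunc("/hook", func(w http.ResponseWriter, r *http.Request) {
            if err := r.ParseForm(); err != nil {
                http.Error(w, "bad payload", http.StatusBadRequest)
                return
            }

            // Assumed form keys; adjust to the actual OutgoingWebhookPayload encoding.
            text := r.FormValue("text")
            trigger := r.FormValue("trigger_word")
            log.Printf("received %q (trigger word %q)", text, trigger)

            // Replying with a "text" field causes the server to post it back
            // through CreateWebhookPost, as in the handler above.
            w.Header().Set("Content-Type", "application/json")
            json.NewEncoder(w).Encode(map[string]string{
                "text":     "Echo: " + text,
                "username": "echo-bot",
            })
        })

        log.Fatal(http.ListenAndServe(":8087", nil))
    }
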
diff --git a/cmd/platform/channel.go b/cmd/platform/channel.go
index 53daf0f9a..0ff2a1b5f 100644
--- a/cmd/platform/channel.go
+++ b/cmd/platform/channel.go
@@ -231,7 +231,7 @@ func archiveChannelsCmdF(cmd *cobra.Command, args []string) error {
}
if len(args) < 1 {
- return errors.New("Enter at least one channel to delete.")
+ return errors.New("Enter at least one channel to archive.")
}
channels := getChannelsFromChannelArgs(args)
@@ -241,7 +241,7 @@ func archiveChannelsCmdF(cmd *cobra.Command, args []string) error {
continue
}
if result := <-app.Srv.Store.Channel().Delete(channel.Id, model.GetMillis()); result.Err != nil {
- CommandPrintErrorln("Unable to delete channel '" + channel.Name + "' error: " + result.Err.Error())
+ CommandPrintErrorln("Unable to archive channel '" + channel.Name + "' error: " + result.Err.Error())
}
}
diff --git a/cmd/platform/server.go b/cmd/platform/server.go
index 2eedbd54a..3413472da 100644
--- a/cmd/platform/server.go
+++ b/cmd/platform/server.go
@@ -120,13 +120,15 @@ func runServer(configFileLocation string) {
einterfaces.GetMetricsInterface().StartServer()
}
- if einterfaces.GetElasticSearchInterface() != nil {
- if err := einterfaces.GetElasticSearchInterface().Start(); err != nil {
+ if einterfaces.GetElasticsearchInterface() != nil {
+ if err := einterfaces.GetElasticsearchInterface().Start(); err != nil {
l4g.Error(err.Error())
}
}
- jobs := jobs.InitJobs(app.Srv.Store).Start()
+ jobs.Srv.Store = app.Srv.Store
+ jobs.Srv.StartWorkers()
+ jobs.Srv.StartSchedulers()
// wait for kill signal before attempting to gracefully shutdown
// the running service
@@ -142,7 +144,8 @@ func runServer(configFileLocation string) {
einterfaces.GetMetricsInterface().StopServer()
}
- jobs.Stop()
+ jobs.Srv.StopSchedulers()
+ jobs.Srv.StopWorkers()
app.StopServer()
}
diff --git a/config/config.json b/config/config.json
index a07cf5c19..6bcae680c 100644
--- a/config/config.json
+++ b/config/config.json
@@ -289,5 +289,9 @@
},
"DataRetentionSettings": {
"Enable": false
+ },
+ "JobSettings": {
+ "RunJobs": true,
+ "RunScheduler": true
}
}
diff --git a/einterfaces/elasticsearch.go b/einterfaces/elasticsearch.go
index af89b38a5..f5babffe0 100644
--- a/einterfaces/elasticsearch.go
+++ b/einterfaces/elasticsearch.go
@@ -5,20 +5,20 @@ package einterfaces
import "github.com/mattermost/platform/model"
-type ElasticSearchInterface interface {
+type ElasticsearchInterface interface {
Start() *model.AppError
- IndexPost(post *model.Post, teamId string)
+ IndexPost(post *model.Post, teamId string) *model.AppError
SearchPosts(channels *model.ChannelList, searchParams []*model.SearchParams) ([]string, *model.AppError)
- DeletePost(postId string)
- TestConfig() *model.AppError
+ DeletePost(postId string) *model.AppError
+ TestConfig(cfg *model.Config) *model.AppError
}
-var theElasticSearchInterface ElasticSearchInterface
+var theElasticsearchInterface ElasticsearchInterface
-func RegisterElasticSearchInterface(newInterface ElasticSearchInterface) {
- theElasticSearchInterface = newInterface
+func RegisterElasticsearchInterface(newInterface ElasticsearchInterface) {
+ theElasticsearchInterface = newInterface
}
-func GetElasticSearchInterface() ElasticSearchInterface {
- return theElasticSearchInterface
+func GetElasticsearchInterface() ElasticsearchInterface {
+ return theElasticsearchInterface
}
diff --git a/einterfaces/jobs/data_retention.go b/einterfaces/jobs/data_retention.go
index 340ed1b88..442f667fa 100644
--- a/einterfaces/jobs/data_retention.go
+++ b/einterfaces/jobs/data_retention.go
@@ -5,11 +5,11 @@ package jobs
import (
"github.com/mattermost/platform/model"
- "github.com/mattermost/platform/store"
)
type DataRetentionInterface interface {
- MakeJob(store store.Store) model.Job
+ MakeWorker() model.Worker
+ MakeScheduler() model.Scheduler
}
var theDataRetentionInterface DataRetentionInterface
diff --git a/einterfaces/jobs/elasticsearch.go b/einterfaces/jobs/elasticsearch.go
new file mode 100644
index 000000000..6d6dbe893
--- /dev/null
+++ b/einterfaces/jobs/elasticsearch.go
@@ -0,0 +1,22 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "github.com/mattermost/platform/model"
+)
+
+type ElasticsearchIndexerInterface interface {
+ MakeWorker() model.Worker
+}
+
+var theElasticsearchIndexerInterface ElasticsearchIndexerInterface
+
+func RegisterElasticsearchIndexerInterface(newInterface ElasticsearchIndexerInterface) {
+ theElasticsearchIndexerInterface = newInterface
+}
+
+func GetElasticsearchIndexerInterface() ElasticsearchIndexerInterface {
+ return theElasticsearchIndexerInterface
+}
diff --git a/i18n/en.json b/i18n/en.json
index e3f7eaacb..0ba3194fa 100644
--- a/i18n/en.json
+++ b/i18n/en.json
@@ -3496,6 +3496,18 @@
"translation": "Compliance export started for job '{{.JobName}}' at '{{.FilePath}}'"
},
{
+ "id": "ent.elasticsearch.delete_post.error",
+ "translation": "Failed to delete the post"
+ },
+ {
+ "id": "ent.elasticsearch.index_post.error",
+ "translation": "Failed to index the post"
+ },
+ {
+ "id": "ent.elasticsearch.not_started.error",
+ "translation": "Elasticsearch is not started"
+ },
+ {
"id": "ent.elasticsearch.search_posts.disabled",
"translation": "ElasticSearch searching is disabled on this server"
},
@@ -3508,19 +3520,19 @@
"translation": "Failed to decode search results"
},
{
- "id": "ent.elasticsearch.start.connect_failed",
+ "id": "ent.elasticsearch.create_client.connect_failed",
"translation": "Setting up ElasticSearch Client Failed"
},
{
- "id": "ent.elasticsearch.start.index_create_failed",
+ "id": "ent.elasticsearch.create_index_if_not_exists.index_create_failed",
"translation": "Failed to create ElasticSearch index"
},
{
- "id": "ent.elasticsearch.start.index_exists_failed",
+ "id": "ent.elasticsearch.create_index_if_not_exists.index_exists_failed",
"translation": "Failed to establish whether ElasticSearch index exists"
},
{
- "id": "ent.elasticsearch.start.index_mapping_failed",
+ "id": "ent.elasticsearch.create_index_if_not_exists.index_mapping_failed",
"translation": "Failed to setup ElasticSearch index mapping"
},
{
@@ -3528,6 +3540,10 @@
"translation": "Failed to set ElasticSearch index settings"
},
{
+ "id": "ent.elasticsearch.test_config.connect_failed",
+ "translation": "Connecting to Elasticsearch server failed."
+ },
+ {
"id": "ent.elasticsearch.test_config.indexing_disabled.error",
"translation": "Elasticsearch is disabled."
},
@@ -3536,6 +3552,10 @@
"translation": "License does not support Elasticsearch."
},
{
+ "id": "ent.elasticsearch.test_config.reenter_password",
+ "translation": "The Elasticsearch Server URL or Username has changed. Please re-enter the Elasticsearch password to test connection."
+ },
+ {
"id": "ent.emoji.licence_disable.app_error",
"translation": "Custom emoji restrictions disabled by current license. Please contact your system administrator about upgrading your enterprise license."
},
@@ -3748,6 +3768,14 @@
"translation": "Page not found"
},
{
+ "id": "jobs.set_job_error.update.error",
+ "translation": "Failed to set job status to error"
+ },
+ {
+ "id": "jobs.request_cancellation.status.error",
+ "translation": "Could not request cancellation for job that is not in a cancelable state."
+ },
+ {
"id": "manaultesting.get_channel_id.no_found.debug",
"translation": "Could not find channel: %v, %v possibilities searched"
},
@@ -5296,24 +5324,24 @@
"translation": "We couldn't save or update the file info"
},
{
- "id": "store.sql_job_status.delete_by_type.app_error",
- "translation": "We couldn't delete the job status"
+ "id": "store.sql_job.delete.app_error",
+ "translation": "We couldn't delete the job"
},
{
- "id": "store.sql_job_status.get.app_error",
- "translation": "We couldn't get the job status"
+ "id": "store.sql_job.get.app_error",
+ "translation": "We couldn't get the job"
},
{
- "id": "store.sql_job_status.get_all.app_error",
- "translation": "We couldn't get all job statuses"
+ "id": "store.sql_job.get_all.app_error",
+ "translation": "We couldn't get the jobs"
},
{
- "id": "store.sql_job_status.save.app_error",
- "translation": "We couldn't save the job status"
+ "id": "store.sql_job.save.app_error",
+ "translation": "We couldn't save the job"
},
{
- "id": "store.sql_job_status.update.app_error",
- "translation": "We couldn't update the job status"
+ "id": "store.sql_job.update.app_error",
+ "translation": "We couldn't update the job"
},
{
"id": "store.sql_license.get.app_error",
@@ -5464,6 +5492,10 @@
"translation": "We couldn't get the parent posts for the channel"
},
{
+ "id": "store.sql_post.get_posts_batch_for_indexing.get.app_error",
+ "translation": "We couldn't get the posts batch for indexing"
+ },
+ {
"id": "store.sql_post.get_posts_by_ids.app_error",
"translation": "We couldn't get the posts"
},
diff --git a/jobs/jobs.go b/jobs/jobs.go
index 8c84f4eea..9247355d0 100644
--- a/jobs/jobs.go
+++ b/jobs/jobs.go
@@ -4,71 +4,125 @@
package jobs
import (
- "sync"
+ "context"
+ "time"
l4g "github.com/alecthomas/log4go"
- ejobs "github.com/mattermost/platform/einterfaces/jobs"
"github.com/mattermost/platform/model"
- "github.com/mattermost/platform/store"
- "github.com/mattermost/platform/utils"
+ "net/http"
)
-type Jobs struct {
- startOnce sync.Once
+const (
+ CANCEL_WATCHER_POLLING_INTERVAL = 5000
+)
- DataRetention model.Job
- // SearchIndexing model.Job
+func CreateJob(jobType string, jobData map[string]interface{}) (*model.Job, *model.AppError) {
+ job := model.Job{
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: model.GetMillis(),
+ Status: model.JOB_STATUS_PENDING,
+ Data: jobData,
+ }
- listenerId string
+ if result := <-Srv.Store.Job().Save(&job); result.Err != nil {
+ return nil, result.Err
+ }
+
+ return &job, nil
}
-func InitJobs(s store.Store) *Jobs {
- jobs := &Jobs{
- // SearchIndexing: MakeTestJob(s, "SearchIndexing"),
+func ClaimJob(job *model.Job) (bool, *model.AppError) {
+ if result := <-Srv.Store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_IN_PROGRESS); result.Err != nil {
+ return false, result.Err
+ } else {
+ success := result.Data.(bool)
+ return success, nil
}
+}
- if dataRetentionInterface := ejobs.GetDataRetentionInterface(); dataRetentionInterface != nil {
- jobs.DataRetention = dataRetentionInterface.MakeJob(s)
+func SetJobProgress(job *model.Job, progress int64) (*model.AppError) {
+ job.Status = model.JOB_STATUS_IN_PROGRESS
+ job.Progress = progress
+
+ if result := <-Srv.Store.Job().UpdateOptimistically(job, model.JOB_STATUS_IN_PROGRESS); result.Err != nil {
+ return result.Err
+ } else {
+ return nil
}
+}
- return jobs
+func SetJobSuccess(job *model.Job) *model.AppError {
+ result := <-Srv.Store.Job().UpdateStatus(job.Id, model.JOB_STATUS_SUCCESS)
+ return result.Err
}
-func (jobs *Jobs) Start() *Jobs {
- l4g.Info("Starting jobs")
+func SetJobError(job *model.Job, jobError *model.AppError) *model.AppError {
+ if jobError == nil {
+ result := <-Srv.Store.Job().UpdateStatus(job.Id, model.JOB_STATUS_ERROR)
+ return result.Err
+ }
- jobs.startOnce.Do(func() {
- if jobs.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
- go jobs.DataRetention.Run()
+ job.Status = model.JOB_STATUS_ERROR
+ job.Progress = -1
+ if job.Data == nil {
+ job.Data = make(map[string]interface{})
+ }
+ job.Data["error"] = jobError
+
+ if result := <-Srv.Store.Job().UpdateOptimistically(job, model.JOB_STATUS_IN_PROGRESS); result.Err != nil {
+ return result.Err
+ } else {
+ if !result.Data.(bool) {
+ if result := <-Srv.Store.Job().UpdateOptimistically(job, model.JOB_STATUS_CANCEL_REQUESTED); result.Err != nil {
+ return result.Err
+ } else {
+ if !result.Data.(bool) {
+ return model.NewAppError("Jobs.SetJobError", "jobs.set_job_error.update.error", nil, "id=" + job.Id, http.StatusInternalServerError)
+ }
+ }
}
+ }
- // go jobs.SearchIndexing.Run()
- })
-
- jobs.listenerId = utils.AddConfigListener(jobs.handleConfigChange)
-
- return jobs
+ return nil
}
-func (jobs *Jobs) handleConfigChange(oldConfig *model.Config, newConfig *model.Config) {
- if jobs.DataRetention != nil {
- if !*oldConfig.DataRetentionSettings.Enable && *newConfig.DataRetentionSettings.Enable {
- go jobs.DataRetention.Run()
- } else if *oldConfig.DataRetentionSettings.Enable && !*newConfig.DataRetentionSettings.Enable {
- jobs.DataRetention.Stop()
- }
- }
+func SetJobCanceled(job *model.Job) *model.AppError {
+ result := <-Srv.Store.Job().UpdateStatus(job.Id, model.JOB_STATUS_CANCELED)
+ return result.Err
}
-func (jobs *Jobs) Stop() *Jobs {
- utils.RemoveConfigListener(jobs.listenerId)
+func RequestCancellation(job *model.Job) *model.AppError {
+ if result := <-Srv.Store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_CANCELED); result.Err != nil {
+ return result.Err
+ } else if result.Data.(bool) {
+ return nil
+ }
- if jobs.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
- jobs.DataRetention.Stop()
+ if result := <-Srv.Store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_IN_PROGRESS, model.JOB_STATUS_CANCEL_REQUESTED); result.Err != nil {
+ return result.Err
+ } else if result.Data.(bool) {
+ return nil
}
- // jobs.SearchIndexing.Stop()
- l4g.Info("Stopped jobs")
+ return model.NewAppError("Jobs.RequestCancellation", "jobs.request_cancellation.status.error", nil, "id=" + job.Id, http.StatusInternalServerError)
+}
- return jobs
+func CancellationWatcher(ctx context.Context, jobId string, cancelChan chan interface{}) {
+ for {
+ select {
+ case <-ctx.Done():
+ l4g.Debug("CancellationWatcher for Job: %v Aborting as job has finished.", jobId)
+ return
+ case <-time.After(CANCEL_WATCHER_POLLING_INTERVAL * time.Millisecond):
+ l4g.Debug("CancellationWatcher for Job: %v polling.", jobId)
+ if result := <-Srv.Store.Job().Get(jobId); result.Err == nil {
+ jobStatus := result.Data.(*model.Job)
+ if jobStatus.Status == model.JOB_STATUS_CANCEL_REQUESTED {
+ close(cancelChan)
+ return
+ }
+ }
+ }
+ }
}
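
Together these helpers define the new per-row job lifecycle: CreateJob inserts a PENDING record, ClaimJob atomically flips PENDING to IN_PROGRESS so only one worker wins, SetJobProgress/SetJobSuccess/SetJobError/SetJobCanceled record the outcome, and CancellationWatcher polls for CANCEL_REQUESTED. A hedged sketch of a worker driving that lifecycle, assuming jobs.Srv.Store has already been wired up as in cmd/platform/server.go further down:

    package main

    import (
        l4g "github.com/alecthomas/log4go"

        "github.com/mattermost/platform/jobs"
        "github.com/mattermost/platform/model"
    )

    func runOnce(jobType string) {
        // Schedule the work as a PENDING row; Data is free-form job metadata.
        job, err := jobs.CreateJob(jobType, map[string]interface{}{"batch_size": 100})
        if err != nil {
            l4g.Error("failed to create job: %v", err.Error())
            return
        }

        // Only the claimant that wins the PENDING -> IN_PROGRESS update does the work.
        if claimed, err := jobs.ClaimJob(job); err != nil || !claimed {
            return
        }

        for progress := int64(0); progress <= 100; progress += 50 {
            // ... do a slice of the real work here ...
            if err := jobs.SetJobProgress(job, progress); err != nil {
                jobs.SetJobError(job, err)
                return
            }
        }

        jobs.SetJobSuccess(job)
    }

    func main() {
        runOnce(model.JOB_TYPE_DATA_RETENTION)
    }
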
diff --git a/jobs/jobs_watcher.go b/jobs/jobs_watcher.go
new file mode 100644
index 000000000..5979d6207
--- /dev/null
+++ b/jobs/jobs_watcher.go
@@ -0,0 +1,92 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "math/rand"
+ "time"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/mattermost/platform/model"
+)
+
+const (
+ WATCHER_POLLING_INTERVAL = 15000
+)
+
+type Watcher struct {
+ workers *Workers
+
+ stop chan bool
+ stopped chan bool
+}
+
+func MakeWatcher(workers *Workers) *Watcher {
+ return &Watcher{
+ stop: make(chan bool, 1),
+ stopped: make(chan bool, 1),
+ workers: workers,
+ }
+}
+
+func (watcher *Watcher) Start() {
+ l4g.Debug("Watcher Started")
+
+ // Delay for some random number of milliseconds before starting to ensure that multiple
+ // instances of the jobserver don't poll at a time too close to each other.
+ rand.Seed(time.Now().UTC().UnixNano())
+ _ = <-time.After(time.Duration(rand.Intn(WATCHER_POLLING_INTERVAL)) * time.Millisecond)
+
+ defer func() {
+ l4g.Debug("Watcher Finished")
+ watcher.stopped <- true
+ }()
+
+ for {
+ select {
+ case <-watcher.stop:
+ l4g.Debug("Watcher: Received stop signal")
+ return
+ case <-time.After(WATCHER_POLLING_INTERVAL * time.Millisecond):
+ watcher.PollAndNotify()
+ }
+ }
+}
+
+func (watcher *Watcher) Stop() {
+ l4g.Debug("Watcher Stopping")
+ watcher.stop <- true
+ <-watcher.stopped
+}
+
+func (watcher *Watcher) PollAndNotify() {
+ if result := <-Srv.Store.Job().GetAllByStatus(model.JOB_STATUS_PENDING); result.Err != nil {
+ l4g.Error("Error occured getting all pending statuses: %v", result.Err.Error())
+ } else {
+ jobStatuses := result.Data.([]*model.Job)
+
+ for _, js := range jobStatuses {
+ j := model.Job{
+ Type: js.Type,
+ Id: js.Id,
+ }
+
+ if js.Type == model.JOB_TYPE_DATA_RETENTION {
+ if watcher.workers.DataRetention != nil {
+ select {
+ case watcher.workers.DataRetention.JobChannel() <- j:
+ default:
+ }
+ }
+ } else if js.Type == model.JOB_TYPE_SEARCH_INDEXING {
+ if watcher.workers.ElasticsearchIndexing != nil {
+ select {
+ case watcher.workers.ElasticsearchIndexing.JobChannel() <- j:
+ default:
+ }
+ }
+ }
+ }
+ }
+}
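
PollAndNotify hands pending jobs to workers with a select that has a default case, so a worker whose (unbuffered) job channel is not ready is simply skipped and the job is retried on the next poll rather than blocking the watcher. The idiom in isolation:

    package main

    import "fmt"

    func main() {
        jobChannel := make(chan string) // unbuffered, like a worker's jobs channel

        // No receiver is ready, so the default branch fires and the send is skipped;
        // the job stays PENDING and is picked up again on the next polling cycle.
        select {
        case jobChannel <- "job-id-1":
            fmt.Println("handed job to worker")
        default:
            fmt.Println("worker busy, retrying next poll")
        }
    }
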
diff --git a/jobs/jobserver/jobserver.go b/jobs/jobserver/jobserver.go
index 5f491a815..aabe5d3b2 100644
--- a/jobs/jobserver/jobserver.go
+++ b/jobs/jobserver/jobserver.go
@@ -16,22 +16,20 @@ import (
_ "github.com/mattermost/platform/imports"
)
-var Srv jobs.JobServer
-
func main() {
// Initialize
utils.InitAndLoadConfig("config.json")
defer l4g.Close()
- Srv.Store = store.NewLayeredStore()
- defer Srv.Store.Close()
+ jobs.Srv.Store = store.NewLayeredStore()
+ defer jobs.Srv.Store.Close()
- Srv.LoadLicense()
+ jobs.Srv.LoadLicense()
// Run jobs
l4g.Info("Starting Mattermost job server")
- Srv.Jobs = jobs.InitJobs(Srv.Store)
- Srv.Jobs.Start()
+ jobs.Srv.StartWorkers()
+ jobs.Srv.StartSchedulers()
var signalChan chan os.Signal = make(chan os.Signal)
signal.Notify(signalChan, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)
@@ -40,7 +38,8 @@ func main() {
// Cleanup anything that isn't handled by a defer statement
l4g.Info("Stopping Mattermost job server")
- Srv.Jobs.Stop()
+ jobs.Srv.StopSchedulers()
+ jobs.Srv.StopWorkers()
l4g.Info("Stopped Mattermost job server")
}
diff --git a/jobs/schedulers.go b/jobs/schedulers.go
new file mode 100644
index 000000000..73ec6661a
--- /dev/null
+++ b/jobs/schedulers.go
@@ -0,0 +1,68 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "sync"
+
+ l4g "github.com/alecthomas/log4go"
+ ejobs "github.com/mattermost/platform/einterfaces/jobs"
+
+ "github.com/mattermost/platform/model"
+ "github.com/mattermost/platform/utils"
+)
+
+type Schedulers struct {
+ startOnce sync.Once
+
+ DataRetention model.Scheduler
+
+ listenerId string
+}
+
+func InitSchedulers() *Schedulers {
+ schedulers := &Schedulers{}
+
+ if dataRetentionInterface := ejobs.GetDataRetentionInterface(); dataRetentionInterface != nil {
+ schedulers.DataRetention = dataRetentionInterface.MakeScheduler()
+ }
+
+ return schedulers
+}
+
+func (schedulers *Schedulers) Start() *Schedulers {
+ l4g.Info("Starting schedulers")
+
+ schedulers.startOnce.Do(func() {
+ if schedulers.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
+ go schedulers.DataRetention.Run()
+ }
+ })
+
+ schedulers.listenerId = utils.AddConfigListener(schedulers.handleConfigChange)
+
+ return schedulers
+}
+
+func (schedulers *Schedulers) handleConfigChange(oldConfig *model.Config, newConfig *model.Config) {
+ if schedulers.DataRetention != nil {
+ if !*oldConfig.DataRetentionSettings.Enable && *newConfig.DataRetentionSettings.Enable {
+ go schedulers.DataRetention.Run()
+ } else if *oldConfig.DataRetentionSettings.Enable && !*newConfig.DataRetentionSettings.Enable {
+ schedulers.DataRetention.Stop()
+ }
+ }
+}
+
+func (schedulers *Schedulers) Stop() *Schedulers {
+ utils.RemoveConfigListener(schedulers.listenerId)
+
+ if schedulers.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
+ schedulers.DataRetention.Stop()
+ }
+
+ l4g.Info("Stopped schedulers")
+
+ return schedulers
+}
diff --git a/jobs/server.go b/jobs/server.go
index dd3448842..7920cb2d5 100644
--- a/jobs/server.go
+++ b/jobs/server.go
@@ -11,10 +11,13 @@ import (
)
type JobServer struct {
- Store store.Store
- Jobs *Jobs
+ Store store.Store
+ Workers *Workers
+ Schedulers *Schedulers
}
+var Srv JobServer
+
func (server *JobServer) LoadLicense() {
licenseId := ""
if result := <-server.Store.System().Get(); result.Err == nil {
@@ -44,3 +47,27 @@ func (server *JobServer) LoadLicense() {
l4g.Info(utils.T("mattermost.load_license.find.warn"))
}
}
+
+func (server *JobServer) StartWorkers() {
+ if *utils.Cfg.JobSettings.RunJobs {
+ Srv.Workers = InitWorkers().Start()
+ }
+}
+
+func (server *JobServer) StartSchedulers() {
+ if *utils.Cfg.JobSettings.RunScheduler {
+ Srv.Schedulers = InitSchedulers().Start()
+ }
+}
+
+func (server *JobServer) StopWorkers() {
+ if Srv.Workers != nil {
+ Srv.Workers.Stop()
+ }
+}
+
+func (server *JobServer) StopSchedulers() {
+ if Srv.Schedulers != nil {
+ Srv.Schedulers.Stop()
+ }
+}
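
JobServer is now a package-level singleton (jobs.Srv): callers assign a store, start workers and schedulers, and stop them in reverse order at shutdown, which is exactly what cmd/platform/server.go and jobs/jobserver do. A condensed sketch of that wiring, reusing InitAndLoadConfig and NewLayeredStore as the job server entry point does:

    package main

    import (
        l4g "github.com/alecthomas/log4go"

        "github.com/mattermost/platform/jobs"
        "github.com/mattermost/platform/store"
        "github.com/mattermost/platform/utils"
    )

    func main() {
        utils.InitAndLoadConfig("config.json")
        defer l4g.Close()

        // The shared store must be wired into the singleton before anything starts.
        jobs.Srv.Store = store.NewLayeredStore()
        defer jobs.Srv.Store.Close()

        jobs.Srv.StartWorkers() // no-op unless JobSettings.RunJobs is true
        jobs.Srv.StartSchedulers()

        // ... block here until a shutdown signal arrives ...

        // Stop schedulers first so no new jobs are queued, then stop the workers.
        jobs.Srv.StopSchedulers()
        jobs.Srv.StopWorkers()
    }
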
diff --git a/jobs/testjob.go b/jobs/testjob.go
deleted file mode 100644
index 59d5274e5..000000000
--- a/jobs/testjob.go
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-package jobs
-
-import (
- "time"
-
- l4g "github.com/alecthomas/log4go"
- "github.com/mattermost/platform/store"
-)
-
-type TestJob struct {
- store store.Store
-
- name string
- stop chan bool
- stopped chan bool
-}
-
-func MakeTestJob(s store.Store, name string) *TestJob {
- return &TestJob{
- store: s,
- name: name,
- stop: make(chan bool, 1),
- stopped: make(chan bool, 1),
- }
-}
-
-func (job *TestJob) Run() {
- l4g.Debug("Job %v: Started", job.name)
-
- running := true
- for running {
- l4g.Debug("Job %v: Tick", job.name)
-
- select {
- case <-job.stop:
- l4g.Debug("Job %v: Received stop signal", job.name)
- running = false
- case <-time.After(10 * time.Second):
- continue
- }
- }
-
- l4g.Debug("Job %v: Finished", job.name)
- job.stopped <- true
-}
-
-func (job *TestJob) Stop() {
- l4g.Debug("Job %v: Stopping", job.name)
- job.stop <- true
- <-job.stopped
-}
diff --git a/jobs/testscheduler.go b/jobs/testscheduler.go
new file mode 100644
index 000000000..31b5d144c
--- /dev/null
+++ b/jobs/testscheduler.go
@@ -0,0 +1,58 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "time"
+
+ l4g "github.com/alecthomas/log4go"
+)
+
+type TestScheduler struct {
+ name string
+ jobType string
+ stop chan bool
+ stopped chan bool
+}
+
+func MakeTestScheduler(name string, jobType string) *TestScheduler {
+ return &TestScheduler{
+ name: name,
+ jobType: jobType,
+ stop: make(chan bool, 1),
+ stopped: make(chan bool, 1),
+ }
+}
+
+func (scheduler *TestScheduler) Run() {
+ l4g.Debug("Scheduler %v: Started", scheduler.name)
+
+ defer func() {
+ l4g.Debug("Scheduler %v: Finished", scheduler.name)
+ scheduler.stopped <- true
+ }()
+
+ for {
+ select {
+ case <-scheduler.stop:
+ l4g.Debug("Scheduler %v: Received stop signal", scheduler.name)
+ return
+ case <-time.After(86400 * time.Second):
+ l4g.Debug("Scheduler: %v: Scheduling new job", scheduler.name)
+ scheduler.AddJob()
+ }
+ }
+}
+
+func (scheduler *TestScheduler) AddJob() {
+ if _, err := CreateJob(scheduler.jobType, nil); err != nil {
+ l4g.Error("Scheduler %v: failed to create job: %v", scheduler.name, err)
+ }
+}
+
+func (scheduler *TestScheduler) Stop() {
+ l4g.Debug("Scheduler %v: Stopping", scheduler.name)
+ scheduler.stop <- true
+ <-scheduler.stopped
+}
diff --git a/jobs/testworker.go b/jobs/testworker.go
new file mode 100644
index 000000000..385a2073b
--- /dev/null
+++ b/jobs/testworker.go
@@ -0,0 +1,104 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "context"
+ "time"
+
+ l4g "github.com/alecthomas/log4go"
+ "github.com/mattermost/platform/model"
+)
+
+type TestWorker struct {
+ name string
+ stop chan bool
+ stopped chan bool
+ jobs chan model.Job
+}
+
+func MakeTestWorker(name string) *TestWorker {
+ return &TestWorker{
+ name: name,
+ stop: make(chan bool, 1),
+ stopped: make(chan bool, 1),
+ jobs: make(chan model.Job),
+ }
+}
+
+func (worker *TestWorker) Run() {
+ l4g.Debug("Worker %v: Started", worker.name)
+
+ defer func() {
+ l4g.Debug("Worker %v: Finished", worker.name)
+ worker.stopped <- true
+ }()
+
+ for {
+ select {
+ case <-worker.stop:
+ l4g.Debug("Worker %v: Received stop signal", worker.name)
+ return
+ case job := <-worker.jobs:
+ l4g.Debug("Worker %v: Received a new candidate job.", worker.name)
+ worker.DoJob(&job)
+ }
+ }
+}
+
+func (worker *TestWorker) DoJob(job *model.Job) {
+ if claimed, err := ClaimJob(job); err != nil {
+ l4g.Error("Job: %v: Error occurred while trying to claim job: %v", job.Id, err.Error())
+ return
+ } else if !claimed {
+ return
+ }
+
+ cancelCtx, cancelCancelWatcher := context.WithCancel(context.Background())
+ cancelWatcherChan := make(chan interface{}, 1)
+ go CancellationWatcher(cancelCtx, job.Id, cancelWatcherChan)
+
+ defer cancelCancelWatcher()
+
+ counter := 0
+ for {
+ select {
+ case <-cancelWatcherChan:
+ l4g.Debug("Job %v: Job has been canceled via CancellationWatcher.", job.Id)
+ if err := SetJobCanceled(job); err != nil {
+ l4g.Error("Failed to mark job: %v as canceled. Error: %v", job.Id, err.Error())
+ }
+ return
+ case <-worker.stop:
+ l4g.Debug("Job %v: Job has been canceled via Worker Stop.", job.Id)
+ if err := SetJobCanceled(job); err != nil {
+ l4g.Error("Failed to mark job: %v as canceled. Error: %v", job.Id, err.Error())
+ }
+ return
+ case <-time.After(5 * time.Second):
+ counter++
+ if counter > 10 {
+ l4g.Debug("Job %v: Job completed.", job.Id)
+ if err := SetJobSuccess(job); err != nil {
+ l4g.Error("Failed to mark job: %v as succeeded. Error: %v", job.Id, err.Error())
+ }
+ return
+ } else {
+ if err := SetJobProgress(job, int64(counter*10)); err != nil {
+ l4g.Error("Job: %v: an error occured while trying to set job progress: %v", job.Id, err.Error())
+ }
+ }
+ }
+ }
+}
+
+func (worker *TestWorker) Stop() {
+ l4g.Debug("Worker %v: Stopping", worker.name)
+ worker.stop <- true
+ <-worker.stopped
+}
+
+func (worker *TestWorker) JobChannel() chan<- model.Job {
+ return worker.jobs
+}
diff --git a/jobs/workers.go b/jobs/workers.go
new file mode 100644
index 000000000..bb80ad79a
--- /dev/null
+++ b/jobs/workers.go
@@ -0,0 +1,96 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package jobs
+
+import (
+ "sync"
+
+ l4g "github.com/alecthomas/log4go"
+ ejobs "github.com/mattermost/platform/einterfaces/jobs"
+ "github.com/mattermost/platform/model"
+ "github.com/mattermost/platform/utils"
+)
+
+type Workers struct {
+ startOnce sync.Once
+ watcher *Watcher
+
+ DataRetention model.Worker
+ ElasticsearchIndexing model.Worker
+
+ listenerId string
+}
+
+func InitWorkers() *Workers {
+ workers := &Workers{
+ // SearchIndexing: MakeTestJob(s, "SearchIndexing"),
+ }
+ workers.watcher = MakeWatcher(workers)
+
+ if dataRetentionInterface := ejobs.GetDataRetentionInterface(); dataRetentionInterface != nil {
+ workers.DataRetention = dataRetentionInterface.MakeWorker()
+ }
+
+ if elasticsearchIndexerInterface := ejobs.GetElasticsearchIndexerInterface(); elasticsearchIndexerInterface != nil {
+ workers.ElasticsearchIndexing = elasticsearchIndexerInterface.MakeWorker()
+ }
+
+ return workers
+}
+
+func (workers *Workers) Start() *Workers {
+ l4g.Info("Starting workers")
+
+ workers.startOnce.Do(func() {
+ if workers.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
+ go workers.DataRetention.Run()
+ }
+
+ if workers.ElasticsearchIndexing != nil && *utils.Cfg.ElasticSearchSettings.EnableIndexing {
+ go workers.ElasticsearchIndexing.Run()
+ }
+
+ go workers.watcher.Start()
+ })
+
+ workers.listenerId = utils.AddConfigListener(workers.handleConfigChange)
+
+ return workers
+}
+
+func (workers *Workers) handleConfigChange(oldConfig *model.Config, newConfig *model.Config) {
+ if workers.DataRetention != nil {
+ if !*oldConfig.DataRetentionSettings.Enable && *newConfig.DataRetentionSettings.Enable {
+ go workers.DataRetention.Run()
+ } else if *oldConfig.DataRetentionSettings.Enable && !*newConfig.DataRetentionSettings.Enable {
+ workers.DataRetention.Stop()
+ }
+ }
+
+ if workers.ElasticsearchIndexing != nil {
+ if !*oldConfig.ElasticSearchSettings.EnableIndexing && *newConfig.ElasticSearchSettings.EnableIndexing {
+ go workers.ElasticsearchIndexing.Run()
+ } else if *oldConfig.ElasticSearchSettings.EnableIndexing && !*newConfig.ElasticSearchSettings.EnableIndexing {
+ workers.ElasticsearchIndexing.Stop()
+ }
+ }
+}
+
+func (workers *Workers) Stop() *Workers {
+ utils.RemoveConfigListener(workers.listenerId)
+
+ workers.watcher.Stop()
+
+ if workers.DataRetention != nil && *utils.Cfg.DataRetentionSettings.Enable {
+ workers.DataRetention.Stop()
+ }
+
+ if workers.ElasticsearchIndexing != nil && *utils.Cfg.ElasticSearchSettings.EnableIndexing {
+ workers.ElasticsearchIndexing.Stop()
+ }
+
+ l4g.Info("Stopped workers")
+
+ return workers
+}
diff --git a/model/client4.go b/model/client4.go
index 9ecdf9a62..a19a17d3a 100644
--- a/model/client4.go
+++ b/model/client4.go
@@ -1809,7 +1809,7 @@ func (c *Client4) GetFileInfosForPost(postId string, etag string) ([]*FileInfo,
// GetPing will return ok if the running goRoutines are below the threshold and unhealthy for above.
func (c *Client4) GetPing() (string, *Response) {
- if r, err := c.DoApiGet(c.GetSystemRoute()+"/ping", ""); r.StatusCode == 500 {
+ if r, err := c.DoApiGet(c.GetSystemRoute()+"/ping", ""); r != nil && r.StatusCode == 500 {
defer r.Body.Close()
return "unhealthy", BuildErrorResponse(r, err)
} else if err != nil {
@@ -2788,22 +2788,22 @@ func (c *Client4) OpenGraph(url string) (map[string]string, *Response) {
// Jobs Section
-// GetJobStatus gets the status of a single job.
-func (c *Client4) GetJobStatus(id string) (*JobStatus, *Response) {
+// GetJob gets a single job.
+func (c *Client4) GetJob(id string) (*Job, *Response) {
if r, err := c.DoApiGet(c.GetJobsRoute()+fmt.Sprintf("/%v/status", id), ""); err != nil {
return nil, BuildErrorResponse(r, err)
} else {
defer closeBody(r)
- return JobStatusFromJson(r.Body), BuildResponse(r)
+ return JobFromJson(r.Body), BuildResponse(r)
}
}
-// GetJobStatusesByType gets the status of all jobs of a given type, sorted with the job that most recently started first.
-func (c *Client4) GetJobStatusesByType(jobType string, page int, perPage int) ([]*JobStatus, *Response) {
+// GetJobsByType gets all jobs of a given type, sorted with the job that most recently started first.
+func (c *Client4) GetJobsByType(jobType string, page int, perPage int) ([]*Job, *Response) {
if r, err := c.DoApiGet(c.GetJobsRoute()+fmt.Sprintf("/type/%v/statuses?page=%v&per_page=%v", jobType, page, perPage), ""); err != nil {
return nil, BuildErrorResponse(r, err)
} else {
defer closeBody(r)
- return JobStatusesFromJson(r.Body), BuildResponse(r)
+ return JobsFromJson(r.Body), BuildResponse(r)
}
}
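
For illustration, a hedged sketch of exercising the renamed client methods (the server URL, credentials, and jobId below are placeholders, not values from this change):

package main

import (
	"fmt"

	"github.com/mattermost/platform/model"
)

func main() {
	client := model.NewAPIv4Client("https://mattermost.example.com")
	client.Login("admin@example.com", "password")

	// Fetch a single job by id.
	jobId := "placeholder-job-id"
	if job, resp := client.GetJob(jobId); resp.Error != nil {
		fmt.Println("get job failed:", resp.Error.Error())
	} else {
		fmt.Println(job.Status, job.Progress)
	}

	// Page through data retention jobs, most recently started first.
	if jobs, resp := client.GetJobsByType(model.JOB_TYPE_DATA_RETENTION, 0, 60); resp.Error == nil {
		for _, j := range jobs {
			fmt.Println(j.Id, j.Status)
		}
	}
}
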
diff --git a/model/command_response.go b/model/command_response.go
index 1b2e06cdf..e3253acc0 100644
--- a/model/command_response.go
+++ b/model/command_response.go
@@ -5,7 +5,6 @@ package model
import (
"encoding/json"
- "fmt"
"io"
)
@@ -15,12 +14,12 @@ const (
)
type CommandResponse struct {
- ResponseType string `json:"response_type"`
- Text string `json:"text"`
- Username string `json:"username"`
- IconURL string `json:"icon_url"`
- GotoLocation string `json:"goto_location"`
- Attachments []*SlackAttachment `json:"attachments"`
+ ResponseType string `json:"response_type"`
+ Text string `json:"text"`
+ Username string `json:"username"`
+ IconURL string `json:"icon_url"`
+ GotoLocation string `json:"goto_location"`
+ Attachments SlackAttachments `json:"attachments"`
}
func (o *CommandResponse) ToJson() string {
@@ -40,14 +39,8 @@ func CommandResponseFromJson(data io.Reader) *CommandResponse {
return nil
}
- // Ensure attachment fields are stored as strings
- for _, attachment := range o.Attachments {
- for _, field := range attachment.Fields {
- if field.Value != nil {
- field.Value = fmt.Sprintf("%v", field.Value)
- }
- }
- }
+ o.Text = ExpandAnnouncement(o.Text)
+ o.Attachments.Process()
return &o
}
diff --git a/model/command_response_test.go b/model/command_response_test.go
index b57a77608..df478ff2c 100644
--- a/model/command_response_test.go
+++ b/model/command_response_test.go
@@ -82,3 +82,17 @@ func TestCommandResponseFromJson(t *testing.T) {
t.Fatal("should've received correct second attachment value")
}
}
+
+func TestCommandResponseNullArrayItems(t *testing.T) {
+ payload := `{"attachments":[{"fields":[{"title":"foo","value":"bar","short":true}, null]}, null]}`
+ cr := CommandResponseFromJson(strings.NewReader(payload))
+ if cr == nil {
+ t.Fatal("CommandResponse should not be nil")
+ }
+ if len(cr.Attachments) != 1 {
+ t.Fatalf("expected one attachment")
+ }
+ if len(cr.Attachments[0].Fields) != 1 {
+ t.Fatalf("expected one field")
+ }
+}
diff --git a/model/config.go b/model/config.go
index 38d27d8bb..5add720d3 100644
--- a/model/config.go
+++ b/model/config.go
@@ -436,6 +436,11 @@ type DataRetentionSettings struct {
Enable *bool
}
+type JobSettings struct {
+ RunJobs *bool
+ RunScheduler *bool
+}
+
type Config struct {
ServiceSettings ServiceSettings
TeamSettings TeamSettings
@@ -462,6 +467,7 @@ type Config struct {
WebrtcSettings WebrtcSettings
ElasticSearchSettings ElasticSearchSettings
DataRetentionSettings DataRetentionSettings
+ JobSettings JobSettings
}
func (o *Config) ToJson() string {
@@ -1380,6 +1386,16 @@ func (o *Config) SetDefaults() {
*o.DataRetentionSettings.Enable = false
}
+ if o.JobSettings.RunJobs == nil {
+ o.JobSettings.RunJobs = new(bool)
+ *o.JobSettings.RunJobs = true
+ }
+
+ if o.JobSettings.RunScheduler == nil {
+ o.JobSettings.RunScheduler = new(bool)
+ *o.JobSettings.RunScheduler = true
+ }
+
o.defaultWebrtcSettings()
}
@@ -1646,8 +1662,6 @@ func (o *Config) Sanitize() {
o.SqlSettings.DataSourceSearchReplicas[i] = FAKE_SETTING
}
- *o.ElasticSearchSettings.ConnectionUrl = FAKE_SETTING
- *o.ElasticSearchSettings.Username = FAKE_SETTING
*o.ElasticSearchSettings.Password = FAKE_SETTING
}
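
A minimal sketch of what the new defaults give callers that dereference the JobSettings pointers (illustrative only; the actual config.json content is in the config change elsewhere in this commit):

package main

import (
	"fmt"

	"github.com/mattermost/platform/model"
)

func main() {
	cfg := &model.Config{}
	cfg.SetDefaults()

	// Both flags default to true when missing from config.json.
	fmt.Println(*cfg.JobSettings.RunJobs, *cfg.JobSettings.RunScheduler) // true true
}
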
diff --git a/model/incoming_webhook.go b/model/incoming_webhook.go
index 2235cb2c6..4ebc7e8b1 100644
--- a/model/incoming_webhook.go
+++ b/model/incoming_webhook.go
@@ -6,10 +6,8 @@ package model
import (
"bytes"
"encoding/json"
- "fmt"
"io"
"regexp"
- "strings"
)
const (
@@ -29,13 +27,13 @@ type IncomingWebhook struct {
}
type IncomingWebhookRequest struct {
- Text string `json:"text"`
- Username string `json:"username"`
- IconURL string `json:"icon_url"`
- ChannelName string `json:"channel"`
- Props StringInterface `json:"props"`
- Attachments []*SlackAttachment `json:"attachments"`
- Type string `json:"type"`
+ Text string `json:"text"`
+ Username string `json:"username"`
+ IconURL string `json:"icon_url"`
+ ChannelName string `json:"channel"`
+ Props StringInterface `json:"props"`
+ Attachments SlackAttachments `json:"attachments"`
+ Type string `json:"type"`
}
func (o *IncomingWebhook) ToJson() string {
@@ -193,39 +191,6 @@ func decodeIncomingWebhookRequest(by []byte) (*IncomingWebhookRequest, error) {
}
}
-// To mention @channel via a webhook in Slack, the message should contain
-// <!channel>, as explained at the bottom of this article:
-// https://get.slack.help/hc/en-us/articles/202009646-Making-announcements
-func expandAnnouncement(text string) string {
- c1 := "<!channel>"
- c2 := "@channel"
- if strings.Contains(text, c1) {
- return strings.Replace(text, c1, c2, -1)
- }
- return text
-}
-
-// Expand announcements in incoming webhooks from Slack. Those announcements
-// can be found in the text attribute, or in the pretext, text, title and value
-// attributes of the attachment structure. The Slack attachment structure is
-// documented here: https://api.slack.com/docs/attachments
-func expandAnnouncements(i *IncomingWebhookRequest) {
- i.Text = expandAnnouncement(i.Text)
-
- for _, attachment := range i.Attachments {
- attachment.Pretext = expandAnnouncement(attachment.Pretext)
- attachment.Text = expandAnnouncement(attachment.Text)
- attachment.Title = expandAnnouncement(attachment.Title)
-
- for _, field := range attachment.Fields {
- if field.Value != nil {
- // Ensure the value is set to a string if it is set
- field.Value = expandAnnouncement(fmt.Sprintf("%v", field.Value))
- }
- }
- }
-}
-
func IncomingWebhookRequestFromJson(data io.Reader) *IncomingWebhookRequest {
buf := new(bytes.Buffer)
buf.ReadFrom(data)
@@ -241,7 +206,8 @@ func IncomingWebhookRequestFromJson(data io.Reader) *IncomingWebhookRequest {
}
}
- expandAnnouncements(o)
+ o.Text = ExpandAnnouncement(o.Text)
+ o.Attachments.Process()
return o
}
diff --git a/model/incoming_webhook_test.go b/model/incoming_webhook_test.go
index f6baca988..36f8ed6e6 100644
--- a/model/incoming_webhook_test.go
+++ b/model/incoming_webhook_test.go
@@ -230,3 +230,17 @@ func TestIncomingWebhookRequestFromJson(t *testing.T) {
}
}
}
+
+func TestIncomingWebhookNullArrayItems(t *testing.T) {
+ payload := `{"attachments":[{"fields":[{"title":"foo","value":"bar","short":true}, null]}, null]}`
+ iwr := IncomingWebhookRequestFromJson(strings.NewReader(payload))
+ if iwr == nil {
+ t.Fatal("IncomingWebhookRequest should not be nil")
+ }
+ if len(iwr.Attachments) != 1 {
+ t.Fatalf("expected one attachment")
+ }
+ if len(iwr.Attachments[0].Fields) != 1 {
+ t.Fatalf("expected one field")
+ }
+}
diff --git a/model/job.go b/model/job.go
index d539b5bf9..b0567bf1a 100644
--- a/model/job.go
+++ b/model/job.go
@@ -3,7 +3,84 @@
package model
-type Job interface {
+import (
+ "encoding/json"
+ "io"
+)
+
+const (
+ JOB_TYPE_DATA_RETENTION = "data_retention"
+ JOB_TYPE_SEARCH_INDEXING = "search_indexing"
+
+ JOB_STATUS_PENDING = "pending"
+ JOB_STATUS_IN_PROGRESS = "in_progress"
+ JOB_STATUS_SUCCESS = "success"
+ JOB_STATUS_ERROR = "error"
+ JOB_STATUS_CANCEL_REQUESTED = "cancel_requested"
+ JOB_STATUS_CANCELED = "canceled"
+)
+
+type Job struct {
+ Id string `json:"id"`
+ Type string `json:"type"`
+ Priority int64 `json:"priority"`
+ CreateAt int64 `json:"create_at"`
+ StartAt int64 `json:"start_at"`
+ LastActivityAt int64 `json:"last_activity_at"`
+ Status string `json:"status"`
+ Progress int64 `json:"progress"`
+ Data map[string]interface{} `json:"data"`
+}
+
+func (js *Job) ToJson() string {
+ if b, err := json.Marshal(js); err != nil {
+ return ""
+ } else {
+ return string(b)
+ }
+}
+
+func JobFromJson(data io.Reader) *Job {
+ var status Job
+ if err := json.NewDecoder(data).Decode(&status); err == nil {
+ return &status
+ } else {
+ return nil
+ }
+}
+
+func JobsToJson(jobs []*Job) string {
+ if b, err := json.Marshal(jobs); err != nil {
+ return ""
+ } else {
+ return string(b)
+ }
+}
+
+func JobsFromJson(data io.Reader) []*Job {
+ var jobs []*Job
+ if err := json.NewDecoder(data).Decode(&jobs); err == nil {
+ return jobs
+ } else {
+ return nil
+ }
+}
+
+func (js *Job) DataToJson() string {
+ if b, err := json.Marshal(js.Data); err != nil {
+ return ""
+ } else {
+ return string(b)
+ }
+}
+
+type Worker interface {
+ Run()
+ Stop()
+ JobChannel() chan<- Job
+}
+
+type Scheduler interface {
Run()
Stop()
}
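
A short sketch of the JSON round trip the new Job model supports (field values are arbitrary examples):

package main

import (
	"fmt"
	"strings"

	"github.com/mattermost/platform/model"
)

func main() {
	job := &model.Job{
		Id:       model.NewId(),
		Type:     model.JOB_TYPE_DATA_RETENTION,
		CreateAt: model.GetMillis(),
		Status:   model.JOB_STATUS_PENDING,
		Data:     map[string]interface{}{"BatchSize": 100},
	}

	// Round-trip through JSON, as the API handlers and store layer do.
	decoded := model.JobFromJson(strings.NewReader(job.ToJson()))
	fmt.Println(decoded.Id == job.Id, decoded.Status) // true pending
}
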
diff --git a/model/job_status.go b/model/job_status.go
deleted file mode 100644
index cf490648f..000000000
--- a/model/job_status.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-package model
-
-import (
- "encoding/json"
- "io"
-)
-
-const (
- JOB_TYPE_DATA_RETENTION = "data_retention"
- JOB_TYPE_SEARCH_INDEXING = "search_indexing"
-)
-
-type JobStatus struct {
- Id string `json:"id"`
- Type string `json:"type"`
- StartAt int64 `json:"start_at"`
- LastActivityAt int64 `json:"last_activity_at"`
- LastRunStartedAt int64 `json:"last_run_started_at"`
- LastRunCompletedAt int64 `json:"last_run_completed_at"`
- Status string `json:"status"`
- Data map[string]interface{} `json:"data"`
-}
-
-func (js *JobStatus) ToJson() string {
- if b, err := json.Marshal(js); err != nil {
- return ""
- } else {
- return string(b)
- }
-}
-
-func JobStatusFromJson(data io.Reader) *JobStatus {
- var status JobStatus
- if err := json.NewDecoder(data).Decode(&status); err == nil {
- return &status
- } else {
- return nil
- }
-}
-
-func JobStatusesToJson(statuses []*JobStatus) string {
- if b, err := json.Marshal(statuses); err != nil {
- return ""
- } else {
- return string(b)
- }
-}
-
-func JobStatusesFromJson(data io.Reader) []*JobStatus {
- var statuses []*JobStatus
- if err := json.NewDecoder(data).Decode(&statuses); err == nil {
- return statuses
- } else {
- return nil
- }
-}
diff --git a/model/license.go b/model/license.go
index 443d78282..8d53bd4cd 100644
--- a/model/license.go
+++ b/model/license.go
@@ -49,7 +49,7 @@ type Features struct {
MHPNS *bool `json:"mhpns"`
SAML *bool `json:"saml"`
PasswordRequirements *bool `json:"password_requirements"`
- ElasticSearch *bool `json:"elastic_search"`
+ Elasticsearch *bool `json:"elastic_search"`
Announcement *bool `json:"announcement"`
// after we enabled more features for webrtc we'll need to control them with this
FutureFeatures *bool `json:"future_features"`
@@ -68,7 +68,7 @@ func (f *Features) ToMap() map[string]interface{} {
"mhpns": *f.MHPNS,
"saml": *f.SAML,
"password": *f.PasswordRequirements,
- "elastic_search": *f.ElasticSearch,
+ "elastic_search": *f.Elasticsearch,
"future": *f.FutureFeatures,
}
}
@@ -139,9 +139,9 @@ func (f *Features) SetDefaults() {
*f.PasswordRequirements = *f.FutureFeatures
}
- if f.ElasticSearch == nil {
- f.ElasticSearch = new(bool)
- *f.ElasticSearch = *f.FutureFeatures
+ if f.Elasticsearch == nil {
+ f.Elasticsearch = new(bool)
+ *f.Elasticsearch = *f.FutureFeatures
}
if f.Announcement == nil {
diff --git a/model/license_test.go b/model/license_test.go
index 8b65d0700..952ab493e 100644
--- a/model/license_test.go
+++ b/model/license_test.go
@@ -45,7 +45,7 @@ func TestLicenseFeaturesSetDefaults(t *testing.T) {
CheckTrue(t, *f.MHPNS)
CheckTrue(t, *f.SAML)
CheckTrue(t, *f.PasswordRequirements)
- CheckTrue(t, *f.ElasticSearch)
+ CheckTrue(t, *f.Elasticsearch)
CheckTrue(t, *f.FutureFeatures)
f = Features{}
@@ -64,7 +64,7 @@ func TestLicenseFeaturesSetDefaults(t *testing.T) {
*f.MHPNS = true
*f.SAML = true
*f.PasswordRequirements = true
- *f.ElasticSearch = true
+ *f.Elasticsearch = true
f.SetDefaults()
@@ -80,7 +80,7 @@ func TestLicenseFeaturesSetDefaults(t *testing.T) {
CheckTrue(t, *f.MHPNS)
CheckTrue(t, *f.SAML)
CheckTrue(t, *f.PasswordRequirements)
- CheckTrue(t, *f.ElasticSearch)
+ CheckTrue(t, *f.Elasticsearch)
CheckFalse(t, *f.FutureFeatures)
}
@@ -161,7 +161,7 @@ func TestLicenseToFromJson(t *testing.T) {
CheckBool(t, *f1.MHPNS, *f.MHPNS)
CheckBool(t, *f1.SAML, *f.SAML)
CheckBool(t, *f1.PasswordRequirements, *f.PasswordRequirements)
- CheckBool(t, *f1.ElasticSearch, *f.ElasticSearch)
+ CheckBool(t, *f1.Elasticsearch, *f.Elasticsearch)
CheckBool(t, *f1.FutureFeatures, *f.FutureFeatures)
invalid := `{"asdf`
diff --git a/model/outgoing_webhook.go b/model/outgoing_webhook.go
index 3cfed9e74..59408c24e 100644
--- a/model/outgoing_webhook.go
+++ b/model/outgoing_webhook.go
@@ -7,6 +7,7 @@ import (
"encoding/json"
"fmt"
"io"
+ "net/http"
"net/url"
"strconv"
"strings"
@@ -41,6 +42,7 @@ type OutgoingWebhookPayload struct {
PostId string `json:"post_id"`
Text string `json:"text"`
TriggerWord string `json:"trigger_word"`
+ FileIds string `json:"file_ids"`
}
func (o *OutgoingWebhookPayload) ToJSON() string {
@@ -65,6 +67,7 @@ func (o *OutgoingWebhookPayload) ToFormValues() string {
v.Set("post_id", o.PostId)
v.Set("text", o.Text)
v.Set("trigger_word", o.TriggerWord)
+ v.Set("file_ids", o.FileIds)
return v.Encode()
}
@@ -112,69 +115,69 @@ func OutgoingWebhookListFromJson(data io.Reader) []*OutgoingWebhook {
func (o *OutgoingWebhook) IsValid() *AppError {
if len(o.Id) != 26 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.id.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.id.app_error", nil, "", http.StatusBadRequest)
}
if len(o.Token) != 26 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.token.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.token.app_error", nil, "", http.StatusBadRequest)
}
if o.CreateAt == 0 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.create_at.app_error", nil, "id="+o.Id)
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.create_at.app_error", nil, "id="+o.Id, http.StatusBadRequest)
}
if o.UpdateAt == 0 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.update_at.app_error", nil, "id="+o.Id)
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.update_at.app_error", nil, "id="+o.Id, http.StatusBadRequest)
}
if len(o.CreatorId) != 26 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.user_id.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.user_id.app_error", nil, "", http.StatusBadRequest)
}
if len(o.ChannelId) != 0 && len(o.ChannelId) != 26 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.channel_id.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.channel_id.app_error", nil, "", http.StatusBadRequest)
}
if len(o.TeamId) != 26 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.team_id.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.team_id.app_error", nil, "", http.StatusBadRequest)
}
if len(fmt.Sprintf("%s", o.TriggerWords)) > 1024 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.words.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.words.app_error", nil, "", http.StatusBadRequest)
}
if len(o.TriggerWords) != 0 {
for _, triggerWord := range o.TriggerWords {
if len(triggerWord) == 0 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.trigger_words.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.trigger_words.app_error", nil, "", http.StatusBadRequest)
}
}
}
if len(o.CallbackURLs) == 0 || len(fmt.Sprintf("%s", o.CallbackURLs)) > 1024 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.callback.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.callback.app_error", nil, "", http.StatusBadRequest)
}
for _, callback := range o.CallbackURLs {
if !IsValidHttpUrl(callback) {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.url.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.url.app_error", nil, "", http.StatusBadRequest)
}
}
if len(o.DisplayName) > 64 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.display_name.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.display_name.app_error", nil, "", http.StatusBadRequest)
}
if len(o.Description) > 128 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.description.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.description.app_error", nil, "", http.StatusBadRequest)
}
if len(o.ContentType) > 128 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "", http.StatusBadRequest)
}
if o.TriggerWhen > 1 {
- return NewLocAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "")
+ return NewAppError("OutgoingWebhook.IsValid", "model.outgoing_hook.is_valid.content_type.app_error", nil, "", http.StatusBadRequest)
}
return nil
@@ -197,8 +200,8 @@ func (o *OutgoingWebhook) PreUpdate() {
o.UpdateAt = GetMillis()
}
-func (o *OutgoingWebhook) HasTriggerWord(word string) bool {
- if len(o.TriggerWords) == 0 || len(word) == 0 {
+func (o *OutgoingWebhook) TriggerWordExactMatch(word string) bool {
+ if len(word) == 0 {
return false
}
@@ -212,7 +215,7 @@ func (o *OutgoingWebhook) HasTriggerWord(word string) bool {
}
func (o *OutgoingWebhook) TriggerWordStartsWith(word string) bool {
- if len(o.TriggerWords) == 0 || len(word) == 0 {
+ if len(word) == 0 {
return false
}
@@ -224,3 +227,27 @@ func (o *OutgoingWebhook) TriggerWordStartsWith(word string) bool {
return false
}
+
+func (o *OutgoingWebhook) GetTriggerWord(word string, isExactMatch bool) (triggerWord string) {
+ if len(word) == 0 {
+ return
+ }
+
+ if isExactMatch {
+ for _, trigger := range o.TriggerWords {
+ if trigger == word {
+ triggerWord = trigger
+ break
+ }
+ }
+ } else {
+ for _, trigger := range o.TriggerWords {
+ if strings.HasPrefix(word, trigger) {
+ triggerWord = trigger
+ break
+ }
+ }
+ }
+
+ return triggerWord
+}
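
A brief usage sketch of the renamed and added matching helpers (the trigger words here are made up):

package main

import (
	"fmt"

	"github.com/mattermost/platform/model"
)

func main() {
	hook := &model.OutgoingWebhook{TriggerWords: []string{"build", "deploy"}}

	fmt.Println(hook.TriggerWordExactMatch("build"))       // true
	fmt.Println(hook.TriggerWordStartsWith("deploy-prod")) // true

	// GetTriggerWord returns the configured word that matched, or "" if none did.
	fmt.Println(hook.GetTriggerWord("deploy-prod", false)) // deploy
	fmt.Println(hook.GetTriggerWord("ship it", true))      // (empty)
}
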
diff --git a/model/outgoing_webhook_test.go b/model/outgoing_webhook_test.go
index 725423cdf..431b1f6c1 100644
--- a/model/outgoing_webhook_test.go
+++ b/model/outgoing_webhook_test.go
@@ -136,6 +136,7 @@ func TestOutgoingWebhookPayloadToFormValues(t *testing.T) {
PostId: "PostId",
Text: "Text",
TriggerWord: "TriggerWord",
+ FileIds: "FileIds",
}
v := url.Values{}
v.Set("token", "Token")
@@ -149,6 +150,7 @@ func TestOutgoingWebhookPayloadToFormValues(t *testing.T) {
v.Set("post_id", "PostId")
v.Set("text", "Text")
v.Set("trigger_word", "TriggerWord")
+ v.Set("file_ids", "FileIds")
if got, want := p.ToFormValues(), v.Encode(); !reflect.DeepEqual(got, want) {
t.Fatalf("Got %+v, wanted %+v", got, want)
}
diff --git a/model/post.go b/model/post.go
index f5a398656..55e6f591d 100644
--- a/model/post.go
+++ b/model/post.go
@@ -62,6 +62,12 @@ type PostPatch struct {
HasReactions *bool `json:"has_reactions"`
}
+type PostForIndexing struct {
+ Post
+ TeamId string `json:"team_id"`
+ ParentCreateAt *int64 `json:"parent_create_at"`
+}
+
func (o *Post) ToJson() string {
b, err := json.Marshal(o)
if err != nil {
diff --git a/model/slack_attachment.go b/model/slack_attachment.go
index 6fd0071b4..a3199c44c 100644
--- a/model/slack_attachment.go
+++ b/model/slack_attachment.go
@@ -3,6 +3,11 @@
package model
+import (
+ "fmt"
+ "strings"
+)
+
type SlackAttachment struct {
Id int64 `json:"id"`
Fallback string `json:"fallback"`
@@ -27,3 +32,50 @@ type SlackAttachmentField struct {
Value interface{} `json:"value"`
Short bool `json:"short"`
}
+
+type SlackAttachments []*SlackAttachment
+
+// To mention @channel via a webhook in Slack, the message should contain
+// <!channel>, as explained at the bottom of this article:
+// https://get.slack.help/hc/en-us/articles/202009646-Making-announcements
+func ExpandAnnouncement(text string) string {
+ c1 := "<!channel>"
+ c2 := "@channel"
+ if strings.Contains(text, c1) {
+ return strings.Replace(text, c1, c2, -1)
+ }
+ return text
+}
+
+// Expand announcements in incoming webhooks from Slack. Those announcements
+// can be found in the text attribute, or in the pretext, text, title and value
+// attributes of the attachment structure. The Slack attachment structure is
+// documented here: https://api.slack.com/docs/attachments
+func (a *SlackAttachments) Process() {
+ var nonNilAttachments []*SlackAttachment
+ for _, attachment := range *a {
+ if attachment == nil {
+ continue
+ }
+ nonNilAttachments = append(nonNilAttachments, attachment)
+
+ attachment.Pretext = ExpandAnnouncement(attachment.Pretext)
+ attachment.Text = ExpandAnnouncement(attachment.Text)
+ attachment.Title = ExpandAnnouncement(attachment.Title)
+
+ var nonNilFields []*SlackAttachmentField
+ for _, field := range attachment.Fields {
+ if field == nil {
+ continue
+ }
+ nonNilFields = append(nonNilFields, field)
+
+ if field.Value != nil {
+ // Ensure the value is set to a string if it is set
+ field.Value = ExpandAnnouncement(fmt.Sprintf("%v", field.Value))
+ }
+ }
+ attachment.Fields = nonNilFields
+ }
+ *a = nonNilAttachments
+}
diff --git a/model/slack_attachment_test.go b/model/slack_attachment_test.go
new file mode 100644
index 000000000..ff4df888f
--- /dev/null
+++ b/model/slack_attachment_test.go
@@ -0,0 +1,38 @@
+package model
+
+import (
+ "testing"
+)
+
+func TestExpandAnnouncement(t *testing.T) {
+ if ExpandAnnouncement("<!channel> foo <!channel>") != "@channel foo @channel" {
+ t.Fail()
+ }
+}
+
+func TestSlackAnnouncementProcess(t *testing.T) {
+ attachments := SlackAttachments{
+ {
+ Pretext: "<!channel> pretext",
+ Text: "<!channel> text",
+ Title: "<!channel> title",
+ Fields: []*SlackAttachmentField{
+ {
+ Title: "foo",
+ Value: "<!channel> bar",
+ Short: true,
+ }, nil,
+ },
+ }, nil,
+ }
+ attachments.Process()
+ if len(attachments) != 1 || len(attachments[0].Fields) != 1 {
+ t.Fail()
+ }
+ if attachments[0].Pretext != "@channel pretext" ||
+ attachments[0].Text != "@channel text" ||
+ attachments[0].Title != "@channel title" ||
+ attachments[0].Fields[0].Value != "@channel bar" {
+ t.Fail()
+ }
+}
diff --git a/store/layered_store.go b/store/layered_store.go
index 58c9e5ca1..ab9859c80 100644
--- a/store/layered_store.go
+++ b/store/layered_store.go
@@ -119,8 +119,8 @@ func (s *LayeredStore) Reaction() ReactionStore {
return s.DatabaseLayer.Reaction()
}
-func (s *LayeredStore) JobStatus() JobStatusStore {
- return s.DatabaseLayer.JobStatus()
+func (s *LayeredStore) Job() JobStore {
+ return s.DatabaseLayer.Job()
}
func (s *LayeredStore) MarkSystemRanUnitTests() {
diff --git a/store/sql_channel_store.go b/store/sql_channel_store.go
index c009d64d3..db9c2c1f4 100644
--- a/store/sql_channel_store.go
+++ b/store/sql_channel_store.go
@@ -361,7 +361,7 @@ func (s SqlChannelStore) GetPinnedPosts(channelId string) StoreChannel {
go func() {
result := StoreResult{}
- pl := &model.PostList{}
+ pl := model.NewPostList()
var posts []*model.Post
if _, err := s.GetReplica().Select(&posts, "SELECT * FROM Posts WHERE IsPinned = true AND ChannelId = :ChannelId AND DeleteAt = 0 ORDER BY CreateAt ASC", map[string]interface{}{"ChannelId": channelId}); err != nil {
diff --git a/store/sql_job_status_store.go b/store/sql_job_status_store.go
deleted file mode 100644
index a87b8267b..000000000
--- a/store/sql_job_status_store.go
+++ /dev/null
@@ -1,190 +0,0 @@
-// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-package store
-
-import (
- "database/sql"
- "net/http"
-
- "github.com/mattermost/platform/model"
-)
-
-type SqlJobStatusStore struct {
- SqlStore
-}
-
-func NewSqlJobStatusStore(sqlStore SqlStore) JobStatusStore {
- s := &SqlJobStatusStore{sqlStore}
-
- for _, db := range sqlStore.GetAllConns() {
- table := db.AddTableWithName(model.JobStatus{}, "JobStatuses").SetKeys(false, "Id")
- table.ColMap("Id").SetMaxSize(26)
- table.ColMap("Type").SetMaxSize(32)
- table.ColMap("Status").SetMaxSize(32)
- table.ColMap("Data").SetMaxSize(1024)
- }
-
- return s
-}
-
-func (jss SqlJobStatusStore) CreateIndexesIfNotExists() {
- jss.CreateIndexIfNotExists("idx_jobstatuses_type", "JobStatuses", "Type")
-}
-
-func (jss SqlJobStatusStore) SaveOrUpdate(status *model.JobStatus) StoreChannel {
- storeChannel := make(StoreChannel, 1)
-
- go func() {
- result := StoreResult{}
-
- if err := jss.GetReplica().SelectOne(&model.JobStatus{},
- `SELECT
- *
- FROM
- JobStatuses
- WHERE
- Id = :Id`, map[string]interface{}{"Id": status.Id}); err == nil {
- if _, err := jss.GetMaster().Update(status); err != nil {
- result.Err = model.NewLocAppError("SqlJobStatusStore.SaveOrUpdate",
- "store.sql_job_status.update.app_error", nil, "id="+status.Id+", "+err.Error())
- }
- } else if err == sql.ErrNoRows {
- if err := jss.GetMaster().Insert(status); err != nil {
- result.Err = model.NewLocAppError("SqlJobStatusStore.SaveOrUpdate",
- "store.sql_job_status.save.app_error", nil, "id="+status.Id+", "+err.Error())
- }
- } else {
- result.Err = model.NewLocAppError("SqlJobStatusStore.SaveOrUpdate",
- "store.sql_job_status.save_or_update.app_error", nil, "id="+status.Id+", "+err.Error())
- }
-
- if result.Err == nil {
- result.Data = status
- }
-
- storeChannel <- result
- close(storeChannel)
- }()
-
- return storeChannel
-}
-
-func (jss SqlJobStatusStore) Get(id string) StoreChannel {
- storeChannel := make(StoreChannel, 1)
-
- go func() {
- result := StoreResult{}
-
- var status *model.JobStatus
-
- if err := jss.GetReplica().SelectOne(&status,
- `SELECT
- *
- FROM
- JobStatuses
- WHERE
- Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
- if err == sql.ErrNoRows {
- result.Err = model.NewAppError("SqlJobStatusStore.Get",
- "store.sql_job_status.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusNotFound)
- } else {
- result.Err = model.NewAppError("SqlJobStatusStore.Get",
- "store.sql_job_status.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusInternalServerError)
- }
- } else {
- result.Data = status
- }
-
- storeChannel <- result
- close(storeChannel)
- }()
-
- return storeChannel
-}
-
-func (jss SqlJobStatusStore) GetAllByType(jobType string) StoreChannel {
- storeChannel := make(StoreChannel, 1)
-
- go func() {
- result := StoreResult{}
-
- var statuses []*model.JobStatus
-
- if _, err := jss.GetReplica().Select(&statuses,
- `SELECT
- *
- FROM
- JobStatuses
- WHERE
- Type = :Type`, map[string]interface{}{"Type": jobType}); err != nil {
- result.Err = model.NewLocAppError("SqlJobStatusStore.GetAllByType",
- "store.sql_job_status.get_all_by_type.app_error", nil, "Type="+jobType+", "+err.Error())
- } else {
- result.Data = statuses
- }
-
- storeChannel <- result
- close(storeChannel)
- }()
-
- return storeChannel
-}
-
-func (jss SqlJobStatusStore) GetAllByTypePage(jobType string, offset int, limit int) StoreChannel {
- storeChannel := make(StoreChannel, 1)
-
- go func() {
- result := StoreResult{}
-
- var statuses []*model.JobStatus
-
- if _, err := jss.GetReplica().Select(&statuses,
- `SELECT
- *
- FROM
- JobStatuses
- WHERE
- Type = :Type
- ORDER BY
- StartAt ASC
- LIMIT
- :Limit
- OFFSET
- :Offset`, map[string]interface{}{"Type": jobType, "Limit": limit, "Offset": offset}); err != nil {
- result.Err = model.NewLocAppError("SqlJobStatusStore.GetAllByTypePage",
- "store.sql_job_status.get_all_by_type_page.app_error", nil, "Type="+jobType+", "+err.Error())
- } else {
- result.Data = statuses
- }
-
- storeChannel <- result
- close(storeChannel)
- }()
-
- return storeChannel
-}
-
-func (jss SqlJobStatusStore) Delete(id string) StoreChannel {
- storeChannel := make(StoreChannel, 1)
-
- go func() {
- result := StoreResult{}
-
- if _, err := jss.GetReplica().Exec(
- `DELETE FROM
- JobStatuses
- WHERE
- Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
- result.Err = model.NewLocAppError("SqlJobStatusStore.DeleteByType",
- "store.sql_job_status.delete.app_error", nil, "id="+id+", "+err.Error())
- } else {
- result.Data = id
- }
-
- storeChannel <- result
- close(storeChannel)
- }()
-
- return storeChannel
-}
diff --git a/store/sql_job_status_store_test.go b/store/sql_job_status_store_test.go
deleted file mode 100644
index 18c29e522..000000000
--- a/store/sql_job_status_store_test.go
+++ /dev/null
@@ -1,151 +0,0 @@
-// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-package store
-
-import (
- "testing"
-
- "github.com/mattermost/platform/model"
-)
-
-func TestJobStatusSaveGetUpdate(t *testing.T) {
- Setup()
-
- status := &model.JobStatus{
- Id: model.NewId(),
- Type: model.NewId(),
- Status: model.NewId(),
- Data: map[string]interface{}{
- "Processed": 0,
- "Total": 12345,
- "LastProcessed": "abcd",
- },
- }
-
- if result := <-store.JobStatus().SaveOrUpdate(status); result.Err != nil {
- t.Fatal(result.Err)
- }
-
- defer func() {
- <-store.JobStatus().Delete(status.Id)
- }()
-
- if result := <-store.JobStatus().Get(status.Id); result.Err != nil {
- t.Fatal(result.Err)
- } else if received := result.Data.(*model.JobStatus); received.Id != status.Id {
- t.Fatal("received incorrect status after save")
- }
-
- status.Status = model.NewId()
- status.Data = map[string]interface{}{
- "Processed": 12345,
- "Total": 12345,
- "LastProcessed": "abcd",
- }
-
- if result := <-store.JobStatus().SaveOrUpdate(status); result.Err != nil {
- t.Fatal(result.Err)
- }
-
- if result := <-store.JobStatus().Get(status.Id); result.Err != nil {
- t.Fatal(result.Err)
- } else if received := result.Data.(*model.JobStatus); received.Id != status.Id || received.Status != status.Status {
- t.Fatal("received incorrect status after update")
- }
-}
-
-func TestJobStatusGetAllByType(t *testing.T) {
- Setup()
-
- jobType := model.NewId()
-
- statuses := []*model.JobStatus{
- {
- Id: model.NewId(),
- Type: jobType,
- },
- {
- Id: model.NewId(),
- Type: jobType,
- },
- {
- Id: model.NewId(),
- Type: model.NewId(),
- },
- }
-
- for _, status := range statuses {
- Must(store.JobStatus().SaveOrUpdate(status))
- defer store.JobStatus().Delete(status.Id)
- }
-
- if result := <-store.JobStatus().GetAllByType(jobType); result.Err != nil {
- t.Fatal(result.Err)
- } else if received := result.Data.([]*model.JobStatus); len(received) != 2 {
- t.Fatal("received wrong number of statuses")
- } else if received[0].Id != statuses[0].Id && received[1].Id != statuses[0].Id {
- t.Fatal("should've received first status")
- } else if received[0].Id != statuses[1].Id && received[1].Id != statuses[1].Id {
- t.Fatal("should've received second status")
- }
-}
-
-func TestJobStatusGetAllByTypePage(t *testing.T) {
- Setup()
-
- jobType := model.NewId()
-
- statuses := []*model.JobStatus{
- {
- Id: model.NewId(),
- Type: jobType,
- StartAt: 1000,
- },
- {
- Id: model.NewId(),
- Type: jobType,
- StartAt: 999,
- },
- {
- Id: model.NewId(),
- Type: jobType,
- StartAt: 1001,
- },
- }
-
- for _, status := range statuses {
- Must(store.JobStatus().SaveOrUpdate(status))
- defer store.JobStatus().Delete(status.Id)
- }
-
- if result := <-store.JobStatus().GetAllByTypePage(jobType, 0, 2); result.Err != nil {
- t.Fatal(result.Err)
- } else if received := result.Data.([]*model.JobStatus); len(received) != 2 {
- t.Fatal("received wrong number of statuses")
- } else if received[0].Id != statuses[1].Id {
- t.Fatal("should've received newest job first")
- } else if received[1].Id != statuses[0].Id {
- t.Fatal("should've received second newest job second")
- }
-
- if result := <-store.JobStatus().GetAllByTypePage(jobType, 2, 2); result.Err != nil {
- t.Fatal(result.Err)
- } else if received := result.Data.([]*model.JobStatus); len(received) != 1 {
- t.Fatal("received wrong number of statuses")
- } else if received[0].Id != statuses[2].Id {
- t.Fatal("should've received oldest job last")
- }
-}
-
-func TestJobStatusDelete(t *testing.T) {
- Setup()
-
- status := Must(store.JobStatus().SaveOrUpdate(&model.JobStatus{
- Id: model.NewId(),
- })).(*model.JobStatus)
-
- if result := <-store.JobStatus().Delete(status.Id); result.Err != nil {
- t.Fatal(result.Err)
- }
-}
diff --git a/store/sql_job_store.go b/store/sql_job_store.go
new file mode 100644
index 000000000..c00e37d86
--- /dev/null
+++ b/store/sql_job_store.go
@@ -0,0 +1,327 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package store
+
+import (
+ "database/sql"
+ "net/http"
+
+ "github.com/mattermost/gorp"
+ "github.com/mattermost/platform/model"
+)
+
+type SqlJobStore struct {
+ SqlStore
+}
+
+func NewSqlJobStore(sqlStore SqlStore) JobStore {
+ s := &SqlJobStore{sqlStore}
+
+ for _, db := range sqlStore.GetAllConns() {
+ table := db.AddTableWithName(model.Job{}, "Jobs").SetKeys(false, "Id")
+ table.ColMap("Id").SetMaxSize(26)
+ table.ColMap("Type").SetMaxSize(32)
+ table.ColMap("Status").SetMaxSize(32)
+ table.ColMap("Data").SetMaxSize(1024)
+ }
+
+ return s
+}
+
+func (jss SqlJobStore) CreateIndexesIfNotExists() {
+ jss.CreateIndexIfNotExists("idx_jobs_type", "Jobs", "Type")
+}
+
+func (jss SqlJobStore) Save(job *model.Job) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+ if err := jss.GetMaster().Insert(job); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.Save",
+ "store.sql_job.save.app_error", nil, "id="+job.Id+", "+err.Error())
+ } else {
+ result.Data = job
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateOptimistically(job *model.Job, currentStatus string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ if sqlResult, err := jss.GetMaster().Exec(
+ `UPDATE
+ Jobs
+ SET
+ LastActivityAt = :LastActivityAt,
+ Status = :Status,
+ Progress = :Progress,
+ Data = :Data
+ WHERE
+ Id = :Id
+ AND
+ Status = :OldStatus`,
+ map[string]interface{}{
+ "Id": job.Id,
+ "OldStatus": currentStatus,
+ "LastActivityAt": model.GetMillis(),
+ "Status": job.Status,
+ "Data": job.DataToJson(),
+ "Progress": job.Progress,
+ }); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.UpdateOptimistically",
+ "store.sql_job.update.app_error", nil, "id="+job.Id+", "+err.Error())
+ } else {
+ rows, err := sqlResult.RowsAffected()
+
+ if err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.UpdateStatus",
+ "store.sql_job.update.app_error", nil, "id="+job.Id+", "+err.Error())
+ } else {
+ if rows == 1 {
+ result.Data = true
+ } else {
+ result.Data = false
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateStatus(id string, status string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ job := &model.Job{
+ Id: id,
+ Status: status,
+ LastActivityAt: model.GetMillis(),
+ }
+
+ if _, err := jss.GetMaster().UpdateColumns(func(col *gorp.ColumnMap) bool {
+ return col.ColumnName == "Status" || col.ColumnName == "LastActivityAt"
+ }, job); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.UpdateStatus",
+ "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error())
+ }
+
+ if result.Err == nil {
+ result.Data = job
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) UpdateStatusOptimistically(id string, currentStatus string, newStatus string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var startAtClause string
+ if newStatus == model.JOB_STATUS_IN_PROGRESS {
+ startAtClause = `StartAt = :StartAt,`
+ }
+
+ if sqlResult, err := jss.GetMaster().Exec(
+ `UPDATE
+ Jobs
+ SET `+startAtClause+`
+ Status = :NewStatus,
+ LastActivityAt = :LastActivityAt
+ WHERE
+ Id = :Id
+ AND
+ Status = :OldStatus`, map[string]interface{}{"Id": id, "OldStatus": currentStatus, "NewStatus": newStatus, "StartAt": model.GetMillis(), "LastActivityAt": model.GetMillis()}); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.UpdateStatus",
+ "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error())
+ } else {
+ rows, err := sqlResult.RowsAffected()
+
+ if err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.UpdateStatus",
+ "store.sql_job.update.app_error", nil, "id="+id+", "+err.Error())
+ } else {
+ if rows == 1 {
+ result.Data = true
+ } else {
+ result.Data = false
+ }
+ }
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) Get(id string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var status *model.Job
+
+ if err := jss.GetReplica().SelectOne(&status,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
+ if err == sql.ErrNoRows {
+ result.Err = model.NewAppError("SqlJobStore.Get",
+ "store.sql_job.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusNotFound)
+ } else {
+ result.Err = model.NewAppError("SqlJobStore.Get",
+ "store.sql_job.get.app_error", nil, "Id="+id+", "+err.Error(), http.StatusInternalServerError)
+ }
+ } else {
+ result.Data = status
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByType(jobType string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Type = :Type`, map[string]interface{}{"Type": jobType}); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.GetAllByType",
+ "store.sql_job.get_all.app_error", nil, "Type="+jobType+", "+err.Error())
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByTypePage(jobType string, offset int, limit int) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Type = :Type
+ ORDER BY
+ StartAt ASC
+ LIMIT
+ :Limit
+ OFFSET
+ :Offset`, map[string]interface{}{"Type": jobType, "Limit": limit, "Offset": offset}); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.GetAllByTypePage",
+ "store.sql_job.get_all.app_error", nil, "Type="+jobType+", "+err.Error())
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) GetAllByStatus(status string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var statuses []*model.Job
+
+ if _, err := jss.GetReplica().Select(&statuses,
+ `SELECT
+ *
+ FROM
+ Jobs
+ WHERE
+ Status = :Status
+ ORDER BY
+ CreateAt ASC`, map[string]interface{}{"Status": status}); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.GetAllByStatus",
+ "store.sql_job.get_all.app_error", nil, "Status="+status+", "+err.Error())
+ } else {
+ result.Data = statuses
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
+
+func (jss SqlJobStore) Delete(id string) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ if _, err := jss.GetMaster().Exec(
+ `DELETE FROM
+ Jobs
+ WHERE
+ Id = :Id`, map[string]interface{}{"Id": id}); err != nil {
+ result.Err = model.NewLocAppError("SqlJobStore.DeleteByType",
+ "store.sql_job.delete.app_error", nil, "id="+id+", "+err.Error())
+ } else {
+ result.Data = id
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
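
As context for how workers are expected to use these store methods, a hedged sketch of the optimistic claim pattern they enable (assumes `s` is an initialized store.Store; the real claim logic lives in the jobs package, changed elsewhere in this commit):

package example

import (
	"github.com/mattermost/platform/model"
	"github.com/mattermost/platform/store"
)

// claimJob flips a pending job to in_progress; only the caller whose UPDATE
// matched the old status gets true back, so concurrent workers cannot both claim it.
func claimJob(s store.Store, job *model.Job) (bool, *model.AppError) {
	result := <-s.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_IN_PROGRESS)
	if result.Err != nil {
		return false, result.Err
	}
	return result.Data.(bool), nil
}
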
diff --git a/store/sql_job_store_test.go b/store/sql_job_store_test.go
new file mode 100644
index 000000000..edf09a4c0
--- /dev/null
+++ b/store/sql_job_store_test.go
@@ -0,0 +1,341 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+package store
+
+import (
+ "testing"
+ "time"
+
+ "github.com/mattermost/platform/model"
+)
+
+func TestJobSaveGet(t *testing.T) {
+ Setup()
+
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.NewId(),
+ Status: model.NewId(),
+ Data: map[string]interface{}{
+ "Processed": 0,
+ "Total": 12345,
+ "LastProcessed": "abcd",
+ },
+ }
+
+ if result := <-store.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+
+ defer func() {
+ <-store.Job().Delete(job.Id)
+ }()
+
+ if result := <-store.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.(*model.Job); received.Id != job.Id {
+ t.Fatal("received incorrect job after save")
+ }
+}
+
+func TestJobGetAllByType(t *testing.T) {
+ Setup()
+
+ jobType := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ },
+ {
+ Id: model.NewId(),
+ Type: model.NewId(),
+ },
+ }
+
+ for _, job := range jobs {
+ Must(store.Job().Save(job))
+ defer store.Job().Delete(job.Id)
+ }
+
+ if result := <-store.Job().GetAllByType(jobType); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 2 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[0].Id && received[1].Id != jobs[0].Id {
+ t.Fatal("should've received first jobs")
+ } else if received[0].Id != jobs[1].Id && received[1].Id != jobs[1].Id {
+ t.Fatal("should've received second jobs")
+ }
+}
+
+func TestJobGetAllByTypePage(t *testing.T) {
+ Setup()
+
+ jobType := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ StartAt: 1000,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ StartAt: 999,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ StartAt: 1001,
+ },
+ }
+
+ for _, job := range jobs {
+ Must(store.Job().Save(job))
+ defer store.Job().Delete(job.Id)
+ }
+
+ if result := <-store.Job().GetAllByTypePage(jobType, 0, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 2 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[1].Id {
+ t.Fatal("should've received newest job first")
+ } else if received[1].Id != jobs[0].Id {
+ t.Fatal("should've received second newest job second")
+ }
+
+ if result := <-store.Job().GetAllByTypePage(jobType, 2, 2); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 1 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[2].Id {
+ t.Fatal("should've received oldest job last")
+ }
+}
+
+func TestJobGetAllByStatus(t *testing.T) {
+ jobType := model.NewId()
+ status := model.NewId()
+
+ jobs := []*model.Job{
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1000,
+ Status: status,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 999,
+ Status: status,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1001,
+ Status: status,
+ },
+ {
+ Id: model.NewId(),
+ Type: jobType,
+ CreateAt: 1002,
+ Status: model.NewId(),
+ },
+ }
+
+ for _, job := range jobs {
+ Must(store.Job().Save(job))
+ defer store.Job().Delete(job.Id)
+ }
+
+ if result := <-store.Job().GetAllByStatus(status); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if received := result.Data.([]*model.Job); len(received) != 3 {
+ t.Fatal("received wrong number of jobs")
+ } else if received[0].Id != jobs[0].Id && received[1].Id != jobs[0].Id {
+ t.Fatal("should've received first jobs")
+ } else if received[0].Id != jobs[1].Id && received[1].Id != jobs[1].Id {
+ t.Fatal("should've received second jobs")
+ }
+}
+
+func TestJobUpdateOptimistically(t *testing.T) {
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.JOB_TYPE_DATA_RETENTION,
+ CreateAt: model.GetMillis(),
+ Status: model.JOB_STATUS_PENDING,
+ }
+
+ if result := <-store.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+ defer store.Job().Delete(job.Id)
+
+ job.LastActivityAt = model.GetMillis()
+ job.Status = model.JOB_STATUS_IN_PROGRESS
+ job.Progress = 50
+ job.Data = map[string]interface{}{
+ "Foo": "Bar",
+ }
+
+ if result := <-store.Job().UpdateOptimistically(job, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else if result.Data.(bool) {
+ t.Fatal("should have failed due to incorrect old status")
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-store.Job().UpdateOptimistically(job, model.JOB_STATUS_PENDING); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("Should have successfully updated")
+ }
+
+ var updatedJob *model.Job
+
+ if result := <-store.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ updatedJob = result.Data.(*model.Job)
+ }
+
+ if updatedJob.Type != job.Type || updatedJob.CreateAt != job.CreateAt || updatedJob.Status != job.Status || updatedJob.LastActivityAt <= job.LastActivityAt || updatedJob.Progress != job.Progress || updatedJob.Data["Foo"] != job.Data["Foo"] {
+ t.Fatal("Some update property was not as expected")
+ }
+ }
+
+}
+
+func TestJobUpdateStatusUpdateStatusOptimistically(t *testing.T) {
+ job := &model.Job{
+ Id: model.NewId(),
+ Type: model.JOB_TYPE_DATA_RETENTION,
+ CreateAt: model.GetMillis(),
+ Status: model.JOB_STATUS_SUCCESS,
+ }
+
+ var lastUpdateAt int64
+ if result := <-store.Job().Save(job); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ lastUpdateAt = result.Data.(*model.Job).LastActivityAt
+ }
+
+ defer store.Job().Delete(job.Id)
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-store.Job().UpdateStatus(job.Id, model.JOB_STATUS_PENDING); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_PENDING {
+ t.Fatal("status wasn't updated")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_IN_PROGRESS, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if result.Data.(bool) {
+ t.Fatal("should be false due to incorrect original status")
+ }
+ }
+
+ if result := <-store.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_PENDING {
+ t.Fatal("should still be pending")
+ }
+ if received.LastActivityAt != lastUpdateAt {
+ t.Fatal("last activity at shouldn't have changed")
+ }
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_IN_PROGRESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("should have succeeded")
+ }
+ }
+
+ var startAtSet int64
+ if result := <-store.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_IN_PROGRESS {
+ t.Fatal("should be in progress")
+ }
+ if received.StartAt == 0 {
+ t.Fatal("received should have start at set")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ startAtSet = received.StartAt
+ }
+
+ time.Sleep(2 * time.Millisecond)
+
+ if result := <-store.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_IN_PROGRESS, model.JOB_STATUS_SUCCESS); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ if !result.Data.(bool) {
+ t.Fatal("should have succeeded")
+ }
+ }
+
+ if result := <-store.Job().Get(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ } else {
+ received := result.Data.(*model.Job)
+ if received.Status != model.JOB_STATUS_SUCCESS {
+ t.Fatal("should be success status")
+ }
+ if received.StartAt != startAtSet {
+ t.Fatal("startAt should not have changed")
+ }
+ if received.LastActivityAt <= lastUpdateAt {
+ t.Fatal("lastActivityAt wasn't updated")
+ }
+ lastUpdateAt = received.LastActivityAt
+ }
+}
+
+func TestJobDelete(t *testing.T) {
+ Setup()
+
+ job := Must(store.Job().Save(&model.Job{
+ Id: model.NewId(),
+ })).(*model.Job)
+
+ if result := <-store.Job().Delete(job.Id); result.Err != nil {
+ t.Fatal(result.Err)
+ }
+}
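For context on the compare-and-set contract these tests exercise (Err reports a SQL failure, Data carries a bool saying whether the row was actually written), here is a minimal caller-side sketch in Go. It is hypothetical code, not part of this change; the helper name and the assumed import paths below are illustrative only.

// Assumed imports:
//   "github.com/mattermost/platform/model"
//   "github.com/mattermost/platform/store"

// saveJobProgress is a hypothetical helper mirroring TestJobUpdateOptimistically:
// it persists the job's mutated fields only while the stored status still
// matches the expected current status.
func saveJobProgress(s store.Store, job *model.Job) (bool, *model.AppError) {
	result := <-s.Job().UpdateOptimistically(job, model.JOB_STATUS_IN_PROGRESS)
	if result.Err != nil {
		return false, result.Err
	}
	// Data is false when another writer already moved the job out of
	// JOB_STATUS_IN_PROGRESS, in which case nothing was written.
	return result.Data.(bool), nil
}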
diff --git a/store/sql_post_store.go b/store/sql_post_store.go
index 6db2d5992..e89b5e042 100644
--- a/store/sql_post_store.go
+++ b/store/sql_post_store.go
@@ -910,8 +910,7 @@ func (s SqlPostStore) Search(teamId string, userId string, params *model.SearchP
result := StoreResult{}
if !*utils.Cfg.ServiceSettings.EnablePostSearch {
- list := &model.PostList{}
- list.MakeNonNil()
+ list := model.NewPostList()
result.Data = list
result.Err = model.NewLocAppError("SqlPostStore.Search", "store.sql_post.search.disabled", nil, fmt.Sprintf("teamId=%v userId=%v params=%v", teamId, userId, params.ToJson()))
@@ -1315,3 +1314,44 @@ func (s SqlPostStore) GetPostsByIds(postIds []string) StoreChannel {
return storeChannel
}
+
+func (s SqlPostStore) GetPostsBatchForIndexing(startTime int64, limit int) StoreChannel {
+ storeChannel := make(StoreChannel, 1)
+
+ go func() {
+ result := StoreResult{}
+
+ var posts []*model.PostForIndexing
+ _, err1 := s.GetSearchReplica().Select(&posts,
+ `(SELECT
+ Posts.*,
+ Channels.TeamId,
+ ParentPosts.CreateAt ParentCreateAt
+ FROM
+ Posts
+ LEFT JOIN
+ Channels
+ ON
+ Posts.ChannelId = Channels.Id
+ LEFT JOIN
+ Posts ParentPosts
+ ON
+ Posts.RootId = ParentPosts.Id
+ WHERE
+ Posts.CreateAt >= :StartTime
+ ORDER BY CreateAt ASC
+ LIMIT :NumPosts)`,
+ map[string]interface{}{"StartTime": startTime, "NumPosts": limit})
+
+ if err1 != nil {
+ result.Err = model.NewLocAppError("SqlPostStore.GetPostsBatchForIndexing", "store.sql_post.get_posts_batch_for_indexing.get.app_error", nil, err1.Error())
+ } else {
+ result.Data = posts
+ }
+
+ storeChannel <- result
+ close(storeChannel)
+ }()
+
+ return storeChannel
+}
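A bulk-index job would presumably page through this query; the loop below is a usage sketch only (indexBatch, the batch size, and the error id are assumptions, not code from this commit). Because the query filters on CreateAt >= :StartTime, advancing the window to the last CreateAt seen re-reads posts that share that boundary timestamp.

// indexAllPosts is a hypothetical bulk-indexing loop over GetPostsBatchForIndexing.
// indexBatch is an assumed callback that pushes one batch into the search backend.
func indexAllPosts(s store.Store, indexBatch func([]*model.PostForIndexing) error) *model.AppError {
	startTime := int64(0)
	const batchSize = 1000

	for {
		result := <-s.Post().GetPostsBatchForIndexing(startTime, batchSize)
		if result.Err != nil {
			return result.Err
		}

		posts := result.Data.([]*model.PostForIndexing)
		if len(posts) == 0 {
			return nil
		}

		if err := indexBatch(posts); err != nil {
			return model.NewLocAppError("indexAllPosts", "bulk_index.batch.app_error", nil, err.Error())
		}

		if len(posts) < batchSize {
			// Short page: everything from startTime onward has been indexed.
			return nil
		}

		// Posts come back ordered by CreateAt ascending, so the last entry is
		// the newest; posts sharing its timestamp are fetched again on the
		// next iteration because of the >= comparison above.
		startTime = posts[len(posts)-1].CreateAt
	}
}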
diff --git a/store/sql_post_store_test.go b/store/sql_post_store_test.go
index 00d4185b4..27e816996 100644
--- a/store/sql_post_store_test.go
+++ b/store/sql_post_store_test.go
@@ -1592,3 +1592,72 @@ func TestPostStoreGetPostsByIds(t *testing.T) {
t.Fatalf("Expected 2 posts in results. Got %v", len(ro5))
}
}
+
+func TestPostStoreGetPostsBatchForIndexing(t *testing.T) {
+ Setup()
+
+ c1 := &model.Channel{}
+ c1.TeamId = model.NewId()
+ c1.DisplayName = "Channel1"
+ c1.Name = "zz" + model.NewId() + "b"
+ c1.Type = model.CHANNEL_OPEN
+ c1 = (<-store.Channel().Save(c1)).Data.(*model.Channel)
+
+ c2 := &model.Channel{}
+ c2.TeamId = model.NewId()
+ c2.DisplayName = "Channel2"
+ c2.Name = "zz" + model.NewId() + "b"
+ c2.Type = model.CHANNEL_OPEN
+ c2 = (<-store.Channel().Save(c2)).Data.(*model.Channel)
+
+ o1 := &model.Post{}
+ o1.ChannelId = c1.Id
+ o1.UserId = model.NewId()
+ o1.Message = "zz" + model.NewId() + "AAAAAAAAAAA"
+ o1 = (<-store.Post().Save(o1)).Data.(*model.Post)
+
+ o2 := &model.Post{}
+ o2.ChannelId = c2.Id
+ o2.UserId = model.NewId()
+ o2.Message = "zz" + model.NewId() + "CCCCCCCCC"
+ o2 = (<-store.Post().Save(o2)).Data.(*model.Post)
+
+ o3 := &model.Post{}
+ o3.ChannelId = c1.Id
+ o3.UserId = model.NewId()
+ o3.ParentId = o1.Id
+ o3.RootId = o1.Id
+ o3.Message = "zz" + model.NewId() + "QQQQQQQQQQ"
+ o3 = (<-store.Post().Save(o3)).Data.(*model.Post)
+
+ if r := Must(store.Post().GetPostsBatchForIndexing(o1.CreateAt, 100)).([]*model.PostForIndexing); len(r) != 3 {
+ t.Fatalf("Expected 3 posts in results. Got %v", len(r))
+ } else {
+ for _, p := range r {
+ if p.Id == o1.Id {
+ if p.TeamId != c1.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if p.ParentCreateAt != nil {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else if p.Id == o2.Id {
+ if p.TeamId != c2.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if p.ParentCreateAt != nil {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else if p.Id == o3.Id {
+ if p.TeamId != c1.TeamId {
+ t.Fatalf("Unexpected team ID")
+ }
+ if *p.ParentCreateAt != o1.CreateAt {
+ t.Fatalf("Unexpected parent create at")
+ }
+ } else {
+ t.Fatalf("unexpected post returned")
+ }
+ }
+ }
+}
diff --git a/store/sql_store.go b/store/sql_store.go
index dc3b51d0c..a039401f3 100644
--- a/store/sql_store.go
+++ b/store/sql_store.go
@@ -79,4 +79,5 @@ type SqlStore interface {
Status() StatusStore
FileInfo() FileInfoStore
Reaction() ReactionStore
+ Job() JobStore
}
diff --git a/store/sql_supplier.go b/store/sql_supplier.go
index 6f51cbd09..0f4ab8380 100644
--- a/store/sql_supplier.go
+++ b/store/sql_supplier.go
@@ -82,7 +82,7 @@ type SqlSupplierOldStores struct {
status StatusStore
fileInfo FileInfoStore
reaction ReactionStore
- jobStatus JobStatusStore
+ job JobStore
}
type SqlSupplier struct {
@@ -121,7 +121,7 @@ func NewSqlSupplier() *SqlSupplier {
supplier.oldStores.status = NewSqlStatusStore(supplier)
supplier.oldStores.fileInfo = NewSqlFileInfoStore(supplier)
supplier.oldStores.reaction = NewSqlReactionStore(supplier)
- supplier.oldStores.jobStatus = NewSqlJobStatusStore(supplier)
+ supplier.oldStores.job = NewSqlJobStore(supplier)
err := supplier.GetMaster().CreateTablesIfNotExists()
if err != nil {
@@ -150,7 +150,7 @@ func NewSqlSupplier() *SqlSupplier {
supplier.oldStores.status.(*SqlStatusStore).CreateIndexesIfNotExists()
supplier.oldStores.fileInfo.(*SqlFileInfoStore).CreateIndexesIfNotExists()
supplier.oldStores.reaction.(*SqlReactionStore).CreateIndexesIfNotExists()
- supplier.oldStores.jobStatus.(*SqlJobStatusStore).CreateIndexesIfNotExists()
+ supplier.oldStores.job.(*SqlJobStore).CreateIndexesIfNotExists()
supplier.oldStores.preference.(*SqlPreferenceStore).DeleteUnusedFeatures()
@@ -752,8 +752,8 @@ func (ss *SqlSupplier) Reaction() ReactionStore {
return ss.oldStores.reaction
}
-func (ss *SqlSupplier) JobStatus() JobStatusStore {
- return ss.oldStores.jobStatus
+func (ss *SqlSupplier) Job() JobStore {
+ return ss.oldStores.job
}
func (ss *SqlSupplier) DropAllTables() {
diff --git a/store/sql_upgrade.go b/store/sql_upgrade.go
index 463415851..a7b72124e 100644
--- a/store/sql_upgrade.go
+++ b/store/sql_upgrade.go
@@ -15,6 +15,7 @@ import (
)
const (
+ VERSION_4_1_0 = "4.1.0"
VERSION_4_0_0 = "4.0.0"
VERSION_3_10_0 = "3.10.0"
VERSION_3_9_0 = "3.9.0"
@@ -49,6 +50,7 @@ func UpgradeDatabase(sqlStore SqlStore) {
UpgradeDatabaseToVersion39(sqlStore)
UpgradeDatabaseToVersion310(sqlStore)
UpgradeDatabaseToVersion40(sqlStore)
+ UpgradeDatabaseToVersion41(sqlStore)
// If the SchemaVersion is empty this is the first time it has run
// so let's set it to the current version.
@@ -276,3 +278,11 @@ func UpgradeDatabaseToVersion40(sqlStore SqlStore) {
saveSchemaVersion(sqlStore, VERSION_4_0_0)
}
}
+
+func UpgradeDatabaseToVersion41(sqlStore SqlStore) {
+ // TODO: Uncomment following condition when version 4.1.0 is released
+ // if shouldPerformUpgrade(sqlStore, VERSION_4_0_0, VERSION_4_1_0) {
+ sqlStore.RemoveTableIfExists("JobStatuses")
+ // saveSchemaVersion(sqlStore, VERSION_4_1_0)
+ // }
+}
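Per the TODO above, once 4.1.0 ships the guard would presumably be restored so the table drop runs exactly once per schema upgrade, mirroring the existing UpgradeDatabaseToVersion40 pattern. A sketch of that final shape:

func UpgradeDatabaseToVersion41(sqlStore SqlStore) {
	if shouldPerformUpgrade(sqlStore, VERSION_4_0_0, VERSION_4_1_0) {
		// The old JobStatuses table is superseded by the new Jobs store.
		sqlStore.RemoveTableIfExists("JobStatuses")
		saveSchemaVersion(sqlStore, VERSION_4_1_0)
	}
}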
diff --git a/store/store.go b/store/store.go
index 0007f495e..062ed0fbd 100644
--- a/store/store.go
+++ b/store/store.go
@@ -48,7 +48,7 @@ type Store interface {
Status() StatusStore
FileInfo() FileInfoStore
Reaction() ReactionStore
- JobStatus() JobStatusStore
+ Job() JobStore
MarkSystemRanUnitTests()
Close()
DropAllTables()
@@ -168,6 +168,7 @@ type PostStore interface {
GetPostsCreatedAt(channelId string, time int64) StoreChannel
Overwrite(post *model.Post) StoreChannel
GetPostsByIds(postIds []string) StoreChannel
+ GetPostsBatchForIndexing(startTime int64, limit int) StoreChannel
}
type UserStore interface {
@@ -384,10 +385,14 @@ type ReactionStore interface {
DeleteAllWithEmojiName(emojiName string) StoreChannel
}
-type JobStatusStore interface {
- SaveOrUpdate(status *model.JobStatus) StoreChannel
+type JobStore interface {
+ Save(job *model.Job) StoreChannel
+ UpdateOptimistically(job *model.Job, currentStatus string) StoreChannel
+ UpdateStatus(id string, status string) StoreChannel
+ UpdateStatusOptimistically(id string, currentStatus string, newStatus string) StoreChannel
Get(id string) StoreChannel
GetAllByType(jobType string) StoreChannel
GetAllByTypePage(jobType string, offset int, limit int) StoreChannel
+ GetAllByStatus(status string) StoreChannel
Delete(id string) StoreChannel
}
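A sketch of how the new JobStore methods could fit together on the worker side. This is hypothetical code, not part of this change: the poll interval, the dispatch callback, the import paths, and the assumption that GetAllByStatus returns []*model.Job (like the other getters) are all illustrative.

// Assumed imports: "time", "github.com/mattermost/platform/model",
// "github.com/mattermost/platform/store".

// pollPendingJobs is a hypothetical polling loop: it periodically lists
// PENDING jobs and claims each one with an optimistic status transition
// before handing it to a worker, so two pollers cannot run the same job.
func pollPendingJobs(s store.Store, stop chan struct{}, dispatch func(*model.Job)) {
	for {
		select {
		case <-stop:
			return
		case <-time.After(15 * time.Second):
			result := <-s.Job().GetAllByStatus(model.JOB_STATUS_PENDING)
			if result.Err != nil {
				continue
			}
			for _, job := range result.Data.([]*model.Job) {
				claimed := <-s.Job().UpdateStatusOptimistically(job.Id, model.JOB_STATUS_PENDING, model.JOB_STATUS_IN_PROGRESS)
				if claimed.Err == nil && claimed.Data.(bool) {
					dispatch(job)
				}
			}
		}
	}
}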
diff --git a/utils/config.go b/utils/config.go
index 22ece13a4..478e3ba99 100644
--- a/utils/config.go
+++ b/utils/config.go
@@ -536,11 +536,6 @@ func getClientConfig(c *model.Config) map[string]string {
props["PasswordRequireSymbol"] = strconv.FormatBool(*c.PasswordSettings.Symbol)
}
- if *License.Features.ElasticSearch {
- props["ElasticSearchEnableIndexing"] = strconv.FormatBool(*c.ElasticSearchSettings.EnableIndexing)
- props["ElasticSearchEnableSearching"] = strconv.FormatBool(*c.ElasticSearchSettings.EnableSearching)
- }
-
if *License.Features.Announcement {
props["EnableBanner"] = strconv.FormatBool(*c.AnnouncementSettings.EnableBanner)
props["BannerText"] = *c.AnnouncementSettings.BannerText
@@ -616,12 +611,6 @@ func Desanitize(cfg *model.Config) {
cfg.SqlSettings.AtRestEncryptKey = Cfg.SqlSettings.AtRestEncryptKey
}
- if *cfg.ElasticSearchSettings.ConnectionUrl == model.FAKE_SETTING {
- *cfg.ElasticSearchSettings.ConnectionUrl = *Cfg.ElasticSearchSettings.ConnectionUrl
- }
- if *cfg.ElasticSearchSettings.Username == model.FAKE_SETTING {
- *cfg.ElasticSearchSettings.Username = *Cfg.ElasticSearchSettings.Username
- }
if *cfg.ElasticSearchSettings.Password == model.FAKE_SETTING {
*cfg.ElasticSearchSettings.Password = *Cfg.ElasticSearchSettings.Password
}
diff --git a/webapp/actions/admin_actions.jsx b/webapp/actions/admin_actions.jsx
index fdaeb8732..ac02ac058 100644
--- a/webapp/actions/admin_actions.jsx
+++ b/webapp/actions/admin_actions.jsx
@@ -383,3 +383,16 @@ export function getPostsPerDayAnalytics(teamId) {
export function getUsersPerDayAnalytics(teamId) {
AdminActions.getUsersPerDayAnalytics(teamId)(dispatch, getState);
}
+
+export function elasticsearchTest(config, success, error) {
+ AdminActions.testElasticsearch(config)(dispatch, getState).then(
+ (data) => {
+ if (data && success) {
+ success(data);
+ } else if (data == null && error) {
+ const serverError = getState().requests.admin.testElasticsearch.error;
+ error({id: serverError.server_error_id, ...serverError});
+ }
+ }
+ );
+}
diff --git a/webapp/actions/file_actions.jsx b/webapp/actions/file_actions.jsx
index 9a565a07c..1d9617901 100644
--- a/webapp/actions/file_actions.jsx
+++ b/webapp/actions/file_actions.jsx
@@ -6,6 +6,8 @@ import request from 'superagent';
import store from 'stores/redux_store.jsx';
+import * as Utils from 'utils/utils.jsx';
+
import {FileTypes} from 'mattermost-redux/action_types';
import {forceLogoutIfNecessary} from 'mattermost-redux/actions/helpers';
import {getLogErrorAction} from 'mattermost-redux/actions/errors';
@@ -20,9 +22,9 @@ export function uploadFile(file, name, channelId, clientId, successCallback, err
if (res && res.body && res.body.id) {
e = res.body;
} else if (err.status === 0 || !err.status) {
- e = {message: this.translations.connectionError};
+ e = {message: Utils.localizeMessage('channel_loader.connection_error', 'There appears to be a problem with your internet connection.')};
} else {
- e = {message: this.translations.unknownError + ' (' + err.status + ')'};
+ e = {message: Utils.localizeMessage('channel_loader.unknown_error', 'We received an unexpected status code from the server.') + ' (' + err.status + ')'};
}
forceLogoutIfNecessary(err, dispatch);
diff --git a/webapp/components/admin_console/admin_sidebar.jsx b/webapp/components/admin_console/admin_sidebar.jsx
index 1dbbff2f2..9b27ab81e 100644
--- a/webapp/components/admin_console/admin_sidebar.jsx
+++ b/webapp/components/admin_console/admin_sidebar.jsx
@@ -258,6 +258,21 @@ export default class AdminSidebar extends React.Component {
/>
);
+ let elasticSearchSettings = null;
+ if (window.mm_license.IsLicensed === 'true') {
+ elasticSearchSettings = (
+ <AdminSidebarSection
+ name='elasticsearch'
+ title={
+ <FormattedMessage
+ id='admin.sidebar.elasticsearch'
+ defaultMessage='Elasticsearch'
+ />
+ }
+ />
+ );
+ }
+
return (
<div className='admin-sidebar'>
<AdminSidebarHeader/>
@@ -618,6 +633,7 @@ export default class AdminSidebar extends React.Component {
/>
}
/>
+ {elasticSearchSettings}
<AdminSidebarSection
name='developer'
title={
diff --git a/webapp/components/admin_console/elasticsearch_settings.jsx b/webapp/components/admin_console/elasticsearch_settings.jsx
new file mode 100644
index 000000000..23ba14b25
--- /dev/null
+++ b/webapp/components/admin_console/elasticsearch_settings.jsx
@@ -0,0 +1,280 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import React from 'react';
+
+import * as Utils from 'utils/utils.jsx';
+
+import AdminSettings from './admin_settings.jsx';
+import {elasticsearchTest} from 'actions/admin_actions.jsx';
+import BooleanSetting from './boolean_setting.jsx';
+import {FormattedMessage} from 'react-intl';
+import SettingsGroup from './settings_group.jsx';
+import TextSetting from './text_setting.jsx';
+import RequestButton from './request_button/request_button.jsx';
+
+export default class ElasticsearchSettings extends AdminSettings {
+ constructor(props) {
+ super(props);
+
+ this.getConfigFromState = this.getConfigFromState.bind(this);
+
+ this.doTestConfig = this.doTestConfig.bind(this);
+ this.handleChange = this.handleChange.bind(this);
+
+ this.renderSettings = this.renderSettings.bind(this);
+ }
+
+ getConfigFromState(config) {
+ config.ElasticSearchSettings.ConnectionUrl = this.state.connectionUrl;
+ config.ElasticSearchSettings.Username = this.state.username;
+ config.ElasticSearchSettings.Password = this.state.password;
+ config.ElasticSearchSettings.Sniff = this.state.sniff;
+ config.ElasticSearchSettings.EnableIndexing = this.state.enableIndexing;
+ config.ElasticSearchSettings.EnableSearching = this.state.enableSearching;
+
+ return config;
+ }
+
+ getStateFromConfig(config) {
+ return {
+ connectionUrl: config.ElasticSearchSettings.ConnectionUrl,
+ username: config.ElasticSearchSettings.Username,
+ password: config.ElasticSearchSettings.Password,
+ sniff: config.ElasticSearchSettings.Sniff,
+ enableIndexing: config.ElasticSearchSettings.EnableIndexing,
+ enableSearching: config.ElasticSearchSettings.EnableSearching,
+ configTested: true,
+ canSave: true
+ };
+ }
+
+ handleChange(id, value) {
+ if (id === 'enableIndexing') {
+ if (value === false) {
+ this.setState({
+ enableSearching: false
+ });
+ } else {
+ this.setState({
+ canSave: false,
+ configTested: false
+ });
+ }
+ }
+
+ if (id === 'connectionUrl' || id === 'username' || id === 'password' || id === 'sniff') {
+ this.setState({
+ configTested: false,
+ canSave: false
+ });
+ }
+
+ super.handleChange(id, value);
+ }
+
+ canSave() {
+ return this.state.canSave;
+ }
+
+ doTestConfig(success, error) {
+ const config = JSON.parse(JSON.stringify(this.props.config));
+ this.getConfigFromState(config);
+
+ elasticsearchTest(
+ config,
+ () => {
+ this.setState({
+ configTested: true,
+ canSave: true
+ });
+ success();
+ },
+ (err) => {
+ this.setState({
+ configTested: false,
+ canSave: false
+ });
+ error(err);
+ }
+ );
+ }
+
+ renderTitle() {
+ return (
+ <FormattedMessage
+ id='admin.elasticsearch.title'
+ defaultMessage='Elasticsearch Settings'
+ />
+ );
+ }
+
+ renderSettings() {
+ return (
+ <SettingsGroup>
+ <div className='banner'>
+ <div className='banner__content'>
+ <FormattedMessage
+ id='admin.elasticsearch.noteDescription'
+ defaultMessage='Changing properties in this section will require a server restart before taking effect.'
+ />
+ </div>
+ </div>
+ <BooleanSetting
+ id='enableIndexing'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.enableIndexingTitle'
+ defaultMessage='Enable Elasticsearch Indexing:'
+ />
+ }
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.enableIndexingDescription'
+ defaultMessage='When true, indexing of new posts occurs automatically. Search queries will use database search until "Enable Elasticsearch for search queries" is enabled. {documentationLink}'
+ values={{
+ documentationLink: (
+ <a
+ href='http://www.mattermost.com'
+ rel='noopener noreferrer'
+ target='_blank'
+ >
+ <FormattedMessage
+ id='admin.elasticsearch.enableIndexingDescription.documentationLinkText'
+ defaultMessage='Learn more about Elasticsearch in our documentation.'
+ />
+ </a>
+ )
+ }}
+ />
+ }
+ value={this.state.enableIndexing}
+ onChange={this.handleChange}
+ />
+ <TextSetting
+ id='connectionUrl'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.connectionUrlTitle'
+ defaultMessage='Server Connection Address:'
+ />
+ }
+ placeholder={Utils.localizeMessage('admin.elasticsearch.connectionUrlExample', 'E.g.: "https://elasticsearch.example.org:9200"')}
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.connectionUrlDescription'
+ defaultMessage='The address of the Elasticsearch server. {documentationLink}'
+ values={{
+ documentationLink: (
+ <a
+ href='http://www.mattermost.com'
+ rel='noopener noreferrer'
+ target='_blank'
+ >
+ <FormattedMessage
+ id='admin.elasticsearch.connectionUrlExample.documentationLinkText'
+ defaultMessage='Please see documentation with server setup instructions.'
+ />
+ </a>
+ )
+ }}
+ />
+ }
+ value={this.state.connectionUrl}
+ disabled={!this.state.enableIndexing}
+ onChange={this.handleChange}
+ />
+ <TextSetting
+ id='username'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.usernameTitle'
+ defaultMessage='Server Username:'
+ />
+ }
+ placeholder={Utils.localizeMessage('admin.elasticsearch.usernameExample', 'E.g.: "elastic"')}
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.usernameDescription'
+ defaultMessage='(Optional) The username to authenticate to the Elasticsearch server.'
+ />
+ }
+ value={this.state.username}
+ disabled={!this.state.enableIndexing}
+ onChange={this.handleChange}
+ />
+ <TextSetting
+ id='password'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.passwordTitle'
+ defaultMessage='Server Password:'
+ />
+ }
+ placeholder={Utils.localizeMessage('admin.elasticsearch.password', 'E.g.: "yourpassword"')}
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.passwordDescription'
+ defaultMessage='(Optional) The password to authenticate to the Elasticsearch server.'
+ />
+ }
+ value={this.state.password}
+ disabled={!this.state.enableIndexing}
+ onChange={this.handleChange}
+ />
+ <BooleanSetting
+ id='sniff'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.sniffTitle'
+ defaultMessage='Enable Cluster Sniffing:'
+ />
+ }
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.sniffDescription'
+ defaultMessage='When true, sniffing finds and connects to all data nodes in your cluster automatically.'
+ />
+ }
+ value={this.state.sniff}
+ disabled={!this.state.enableIndexing}
+ onChange={this.handleChange}
+ />
+ <RequestButton
+ requestAction={this.doTestConfig}
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.testHelpText'
+ defaultMessage='Tests if the Mattermost server can connect to the Elasticsearch server specified. Testing the connection does not save the configuration. See log file for more detailed error messages.'
+ />
+ }
+ buttonText={
+ <FormattedMessage
+ id='admin.elasticsearch.elasticsearch_test_button'
+ defaultMessage='Test Connection'
+ />
+ }
+ disabled={!this.state.enableIndexing}
+ />
+ <BooleanSetting
+ id='enableSearching'
+ label={
+ <FormattedMessage
+ id='admin.elasticsearch.enableSearchingTitle'
+ defaultMessage='Enable Elasticsearch for search queries:'
+ />
+ }
+ helpText={
+ <FormattedMessage
+ id='admin.elasticsearch.enableSearchingDescription'
+ defaultMessage='Requires a successful connection to the Elasticsearch server. When true, Elasticsearch will be used for all search queries using the latest index. Search results may be incomplete until a bulk index of the existing post database is finished. When false, database search is used.'
+ />
+ }
+ value={this.state.enableSearching}
+ disabled={!this.state.enableIndexing || !this.state.configTested}
+ onChange={this.handleChange}
+ />
+ </SettingsGroup>
+ );
+ }
+}
diff --git a/webapp/components/admin_console/manage_teams_modal/manage_teams_dropdown.jsx b/webapp/components/admin_console/manage_teams_modal/manage_teams_dropdown.jsx
index e58a2c43d..4ee3c11cd 100644
--- a/webapp/components/admin_console/manage_teams_modal/manage_teams_dropdown.jsx
+++ b/webapp/components/admin_console/manage_teams_modal/manage_teams_dropdown.jsx
@@ -38,8 +38,8 @@ export default class ManageTeamsDropdown extends React.Component {
}
toggleDropdown() {
- this.setState({
- show: !this.state.show
+ this.setState((prevState) => {
+ return {show: !prevState.show};
});
}
diff --git a/webapp/components/admin_console/push_settings.jsx b/webapp/components/admin_console/push_settings.jsx
index c0ce64f8a..3b21f727a 100644
--- a/webapp/components/admin_console/push_settings.jsx
+++ b/webapp/components/admin_console/push_settings.jsx
@@ -79,8 +79,6 @@ export default class PushSettings extends AdminSettings {
agree = true;
} else if (config.EmailSettings.PushNotificationServer === Constants.MTPNS) {
pushNotificationServerType = PUSH_NOTIFICATIONS_MTPNS;
- } else {
- pushNotificationServerType = PUSH_NOTIFICATIONS_CUSTOM;
}
let pushNotificationServer = config.EmailSettings.PushNotificationServer;
diff --git a/webapp/components/file_preview.jsx b/webapp/components/file_preview.jsx
index 65a71c047..0606c1b31 100644
--- a/webapp/components/file_preview.jsx
+++ b/webapp/components/file_preview.jsx
@@ -3,7 +3,7 @@
import ReactDOM from 'react-dom';
import * as Utils from 'utils/utils.jsx';
-import {getFileUrl} from 'mattermost-redux/utils/file_utils';
+import {getFileThumbnailUrl} from 'mattermost-redux/utils/file_utils';
import PropTypes from 'prop-types';
@@ -39,7 +39,7 @@ export default class FilePreview extends React.Component {
previewImage = (
<img
className='file-preview__image'
- src={getFileUrl(info.id)}
+ src={getFileThumbnailUrl(info.id)}
/>
);
} else {
diff --git a/webapp/components/integrations/components/abstract_outgoing_webhook.jsx b/webapp/components/integrations/components/abstract_outgoing_webhook.jsx
index 912ad3bdf..397423395 100644
--- a/webapp/components/integrations/components/abstract_outgoing_webhook.jsx
+++ b/webapp/components/integrations/components/abstract_outgoing_webhook.jsx
@@ -16,61 +16,81 @@ import {Link} from 'react-router/es6';
import SpinnerButton from 'components/spinner_button.jsx';
export default class AbstractOutgoingWebhook extends React.Component {
- static get propTypes() {
- return {
- team: PropTypes.object
- };
+ static propTypes = {
+
+ /**
+ * The current team
+ */
+ team: PropTypes.object.isRequired,
+
+ /**
+ * The header text to render, has id and defaultMessage
+ */
+ header: PropTypes.object.isRequired,
+
+ /**
+ * The footer text to render, has id and defaultMessage
+ */
+ footer: PropTypes.object.isRequired,
+
+ /**
+ * Any extra component/node to render
+ */
+ renderExtra: PropTypes.node.isRequired,
+
+ /**
+ * The server error text after a failed action
+ */
+ serverError: PropTypes.string.isRequired,
+
+ /**
+ * The hook used to set the initial state
+ */
+ initialHook: PropTypes.object,
+
+ /**
+ * The async function to run when the action button is pressed
+ */
+ action: PropTypes.func.isRequired
}
constructor(props) {
super(props);
- this.handleSubmit = this.handleSubmit.bind(this);
-
- this.updateDisplayName = this.updateDisplayName.bind(this);
- this.updateDescription = this.updateDescription.bind(this);
- this.updateContentType = this.updateContentType.bind(this);
- this.updateChannelId = this.updateChannelId.bind(this);
- this.updateTriggerWords = this.updateTriggerWords.bind(this);
- this.updateTriggerWhen = this.updateTriggerWhen.bind(this);
- this.updateCallbackUrls = this.updateCallbackUrls.bind(this);
-
- this.state = {
- displayName: '',
- description: '',
- contentType: 'application/x-www-form-urlencoded',
- channelId: '',
- triggerWords: '',
- triggerWhen: 0,
- callbackUrls: '',
- saving: false,
- serverError: '',
- clientError: null
- };
-
- if (typeof this.performAction === 'undefined') {
- throw new TypeError('Subclasses must override performAction');
- }
-
- if (typeof this.header === 'undefined') {
- throw new TypeError('Subclasses must override header');
- }
+ this.state = this.getStateFromHook(this.props.initialHook || {});
+ }
- if (typeof this.footer === 'undefined') {
- throw new TypeError('Subclasses must override footer');
+ getStateFromHook = (hook) => {
+ let triggerWords = '';
+ if (hook.trigger_words) {
+ let i = 0;
+ for (i = 0; i < hook.trigger_words.length; i++) {
+ triggerWords += hook.trigger_words[i] + '\n';
+ }
}
- if (typeof this.renderExtra === 'undefined') {
- throw new TypeError('Subclasses must override renderExtra');
+ let callbackUrls = '';
+ if (hook.callback_urls) {
+ let i = 0;
+ for (i = 0; i < hook.callback_urls.length; i++) {
+ callbackUrls += hook.callback_urls[i] + '\n';
+ }
}
- this.performAction = this.performAction.bind(this);
- this.header = this.header.bind(this);
- this.footer = this.footer.bind(this);
- this.renderExtra = this.renderExtra.bind(this);
+ return {
+ displayName: hook.display_name || '',
+ description: hook.description || '',
+ contentType: hook.content_type || 'application/x-www-form-urlencoded',
+ channelId: hook.channel_id || '',
+ triggerWords,
+ triggerWhen: hook.trigger_when || 0,
+ callbackUrls,
+ saving: false,
+ clientError: null
+ };
}
- handleSubmit(e) {
+ handleSubmit = (e) => {
e.preventDefault();
if (this.state.saving) {
@@ -79,7 +99,6 @@ export default class AbstractOutgoingWebhook extends React.Component {
this.setState({
saving: true,
- serverError: '',
clientError: ''
});
@@ -142,46 +161,46 @@ export default class AbstractOutgoingWebhook extends React.Component {
description: this.state.description
};
- this.performAction(hook);
+ this.props.action(hook).then(() => this.setState({saving: false}));
}
- updateDisplayName(e) {
+ updateDisplayName = (e) => {
this.setState({
displayName: e.target.value
});
}
- updateDescription(e) {
+ updateDescription = (e) => {
this.setState({
description: e.target.value
});
}
- updateContentType(e) {
+ updateContentType = (e) => {
this.setState({
contentType: e.target.value
});
}
- updateChannelId(e) {
+ updateChannelId = (e) => {
this.setState({
channelId: e.target.value
});
}
- updateTriggerWords(e) {
+ updateTriggerWords = (e) => {
this.setState({
triggerWords: e.target.value
});
}
- updateTriggerWhen(e) {
+ updateTriggerWhen = (e) => {
this.setState({
triggerWhen: e.target.value
});
}
- updateCallbackUrls(e) {
+ updateCallbackUrls = (e) => {
this.setState({
callbackUrls: e.target.value
});
@@ -191,9 +210,9 @@ export default class AbstractOutgoingWebhook extends React.Component {
const contentTypeOption1 = 'application/x-www-form-urlencoded';
const contentTypeOption2 = 'application/json';
- var headerToRender = this.header();
- var footerToRender = this.footer();
- var renderExtra = this.renderExtra();
+ var headerToRender = this.props.header;
+ var footerToRender = this.props.footer;
+ var renderExtra = this.props.renderExtra;
return (
<div className='backstage-content'>
@@ -432,7 +451,7 @@ export default class AbstractOutgoingWebhook extends React.Component {
<div className='backstage-form__footer'>
<FormError
type='backstage'
- errors={[this.state.serverError, this.state.clientError]}
+ errors={[this.props.serverError, this.state.clientError]}
/>
<Link
className='btn btn-sm'
diff --git a/webapp/components/integrations/components/add_outgoing_webhook.jsx b/webapp/components/integrations/components/add_outgoing_webhook.jsx
deleted file mode 100644
index d7f338587..000000000
--- a/webapp/components/integrations/components/add_outgoing_webhook.jsx
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-import {addOutgoingHook} from 'actions/integration_actions.jsx';
-import {browserHistory} from 'react-router/es6';
-
-import AbstractOutgoingWebhook from './abstract_outgoing_webhook.jsx';
-
-export default class AddOutgoingWebhook extends AbstractOutgoingWebhook {
- performAction(hook) {
- addOutgoingHook(
- hook,
- (data) => {
- browserHistory.push(`/${this.props.team.name}/integrations/confirm?type=outgoing_webhooks&id=${data.id}`);
- },
- (err) => {
- this.setState({
- saving: false,
- serverError: err.message
- });
- }
- );
- }
-
- header() {
- return {id: 'integrations.add', defaultMessage: 'Add'};
- }
-
- footer() {
- return {id: 'add_outgoing_webhook.save', defaultMessage: 'Save'};
- }
-
- renderExtra() {
- return '';
- }
-}
diff --git a/webapp/components/integrations/components/add_outgoing_webhook/add_outgoing_webhook.jsx b/webapp/components/integrations/components/add_outgoing_webhook/add_outgoing_webhook.jsx
new file mode 100644
index 000000000..41ab8a073
--- /dev/null
+++ b/webapp/components/integrations/components/add_outgoing_webhook/add_outgoing_webhook.jsx
@@ -0,0 +1,69 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import AbstractOutgoingWebhook from 'components/integrations/components/abstract_outgoing_webhook.jsx';
+
+import React from 'react';
+import {browserHistory} from 'react-router/es6';
+import PropTypes from 'prop-types';
+
+const HEADER = {id: 'integrations.add', defaultMessage: 'Add'};
+const FOOTER = {id: 'add_outgoing_webhook.save', defaultMessage: 'Save'};
+
+export default class AddOutgoingWebhook extends React.PureComponent {
+ static propTypes = {
+
+ /**
+ * The current team
+ */
+ team: PropTypes.object.isRequired,
+
+ /**
+ * The request state for createOutgoingHook action. Contains status and error
+ */
+ createOutgoingHookRequest: PropTypes.object.isRequired,
+
+ actions: PropTypes.shape({
+
+ /**
+ * The function to call to add a new outgoing webhook
+ */
+ createOutgoingHook: PropTypes.func.isRequired
+ }).isRequired
+ }
+
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ serverError: ''
+ };
+ }
+
+ addOutgoingHook = async (hook) => {
+ this.setState({serverError: ''});
+
+ const data = await this.props.actions.createOutgoingHook(hook);
+ if (data) {
+ browserHistory.push(`/${this.props.team.name}/integrations/confirm?type=outgoing_webhooks&id=${data.id}`);
+ return;
+ }
+
+ if (this.props.createOutgoingHookRequest.error) {
+ this.setState({serverError: this.props.createOutgoingHookRequest.error.message});
+ }
+ }
+
+ render() {
+ return (
+ <AbstractOutgoingWebhook
+ team={this.props.team}
+ header={HEADER}
+ footer={FOOTER}
+ renderExtra={''}
+ action={this.addOutgoingHook}
+ serverError={this.state.serverError}
+ />
+ );
+ }
+}
diff --git a/webapp/components/integrations/components/add_outgoing_webhook/index.js b/webapp/components/integrations/components/add_outgoing_webhook/index.js
new file mode 100644
index 000000000..f930ac81f
--- /dev/null
+++ b/webapp/components/integrations/components/add_outgoing_webhook/index.js
@@ -0,0 +1,25 @@
+// Copyright (c) 2017 Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import {connect} from 'react-redux';
+import {bindActionCreators} from 'redux';
+import {createOutgoingHook} from 'mattermost-redux/actions/integrations';
+
+import AddOutgoingWebhook from './add_outgoing_webhook.jsx';
+
+function mapStateToProps(state, ownProps) {
+ return {
+ ...ownProps,
+ createOutgoingHookRequest: state.requests.integrations.createOutgoingHook
+ };
+}
+
+function mapDispatchToProps(dispatch) {
+ return {
+ actions: bindActionCreators({
+ createOutgoingHook
+ }, dispatch)
+ };
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(AddOutgoingWebhook);
diff --git a/webapp/components/integrations/components/edit_outgoing_webhook.jsx b/webapp/components/integrations/components/edit_outgoing_webhook.jsx
deleted file mode 100644
index 2f56d1eae..000000000
--- a/webapp/components/integrations/components/edit_outgoing_webhook.jsx
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
-// See License.txt for license information.
-
-import React from 'react';
-
-import {browserHistory} from 'react-router/es6';
-import IntegrationStore from 'stores/integration_store.jsx';
-import {loadOutgoingHooks, updateOutgoingHook} from 'actions/integration_actions.jsx';
-
-import AbstractOutgoingWebhook from './abstract_outgoing_webhook.jsx';
-import ConfirmModal from 'components/confirm_modal.jsx';
-import {FormattedMessage} from 'react-intl';
-import TeamStore from 'stores/team_store.jsx';
-
-export default class EditOutgoingWebhook extends AbstractOutgoingWebhook {
- constructor(props) {
- super(props);
-
- this.handleIntegrationChange = this.handleIntegrationChange.bind(this);
- this.handleConfirmModal = this.handleConfirmModal.bind(this);
- this.handleUpdate = this.handleUpdate.bind(this);
- this.submitCommand = this.submitCommand.bind(this);
- this.confirmModalDismissed = this.confirmModalDismissed.bind(this);
- this.originalOutgoingHook = null;
-
- this.state = {
- showConfirmModal: false
- };
- }
-
- componentDidMount() {
- IntegrationStore.addChangeListener(this.handleIntegrationChange);
-
- if (window.mm_config.EnableOutgoingWebhooks === 'true') {
- loadOutgoingHooks();
- }
- }
-
- componentWillUnmount() {
- IntegrationStore.removeChangeListener(this.handleIntegrationChange);
- }
-
- handleIntegrationChange() {
- const teamId = TeamStore.getCurrentId();
-
- const hooks = IntegrationStore.getOutgoingWebhooks(teamId);
- const loading = !IntegrationStore.hasReceivedOutgoingWebhooks(teamId);
-
- if (!loading) {
- this.originalOutgoingHook = hooks.filter((hook) => hook.id === this.props.location.query.id)[0];
-
- var triggerWords = '';
- if (this.originalOutgoingHook.trigger_words) {
- let i = 0;
- for (i = 0; i < this.originalOutgoingHook.trigger_words.length; i++) {
- triggerWords += this.originalOutgoingHook.trigger_words[i] + '\n';
- }
- }
-
- var callbackUrls = '';
- if (this.originalOutgoingHook.callback_urls) {
- let i = 0;
- for (i = 0; i < this.originalOutgoingHook.callback_urls.length; i++) {
- callbackUrls += this.originalOutgoingHook.callback_urls[i] + '\n';
- }
- }
-
- this.setState({
- displayName: this.originalOutgoingHook.display_name,
- description: this.originalOutgoingHook.description,
- channelId: this.originalOutgoingHook.channel_id,
- contentType: this.originalOutgoingHook.content_type,
- triggerWhen: this.originalOutgoingHook.trigger_when,
- triggerWords,
- callbackUrls
- });
- }
- }
-
- performAction(hook) {
- this.newHook = hook;
-
- if (this.originalOutgoingHook.id) {
- hook.id = this.originalOutgoingHook.id;
- }
-
- if (this.originalOutgoingHook.token) {
- hook.token = this.originalOutgoingHook.token;
- }
-
- var triggerWordsSame = (this.originalOutgoingHook.trigger_words.length === hook.trigger_words.length) &&
- this.originalOutgoingHook.trigger_words.every((v, i) => v === hook.trigger_words[i]);
-
- var callbackUrlsSame = (this.originalOutgoingHook.callback_urls.length === hook.callback_urls.length) &&
- this.originalOutgoingHook.callback_urls.every((v, i) => v === hook.callback_urls[i]);
-
- if (this.originalOutgoingHook.content_type !== hook.content_type ||
- !triggerWordsSame || !callbackUrlsSame) {
- this.handleConfirmModal();
- this.setState({
- saving: false
- });
- } else {
- this.submitCommand();
- }
- }
-
- handleUpdate() {
- this.setState({
- saving: true,
- serverError: '',
- clientError: ''
- });
-
- this.submitCommand();
- }
-
- handleConfirmModal() {
- this.setState({showConfirmModal: true});
- }
-
- confirmModalDismissed() {
- this.setState({showConfirmModal: false});
- }
-
- submitCommand() {
- updateOutgoingHook(
- this.newHook,
- () => {
- browserHistory.push(`/${this.props.team.name}/integrations/outgoing_webhooks`);
- },
- (err) => {
- this.setState({
- saving: false,
- showConfirmModal: false,
- serverError: err.message
- });
- }
- );
- }
-
- header() {
- return {id: 'integrations.edit', defaultMessage: 'Edit'};
- }
-
- footer() {
- return {id: 'update_outgoing_webhook.update', defaultMessage: 'Update'};
- }
-
- renderExtra() {
- const confirmButton = (
- <FormattedMessage
- id='update_outgoing_webhook.update'
- defaultMessage='Update'
- />
- );
-
- const confirmTitle = (
- <FormattedMessage
- id='update_outgoing_webhook.confirm'
- defaultMessage='Edit Outgoing Webhook'
- />
- );
-
- const confirmMessage = (
- <FormattedMessage
- id='update_outgoing_webhook.question'
- defaultMessage='Your changes may break the existing outgoing webhook. Are you sure you would like to update it?'
- />
- );
-
- return (
- <ConfirmModal
- title={confirmTitle}
- message={confirmMessage}
- confirmButtonText={confirmButton}
- show={this.state.showConfirmModal}
- onConfirm={this.handleUpdate}
- onCancel={this.confirmModalDismissed}
- />
- );
- }
-}
diff --git a/webapp/components/integrations/components/edit_outgoing_webhook/edit_outgoing_webhook.jsx b/webapp/components/integrations/components/edit_outgoing_webhook/edit_outgoing_webhook.jsx
new file mode 100644
index 000000000..9b2dbff0a
--- /dev/null
+++ b/webapp/components/integrations/components/edit_outgoing_webhook/edit_outgoing_webhook.jsx
@@ -0,0 +1,169 @@
+// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import AbstractOutgoingWebhook from 'components/integrations/components/abstract_outgoing_webhook.jsx';
+import ConfirmModal from 'components/confirm_modal.jsx';
+import LoadingScreen from 'components/loading_screen.jsx';
+
+import React from 'react';
+import PropTypes from 'prop-types';
+import {browserHistory} from 'react-router/es6';
+import {FormattedMessage} from 'react-intl';
+
+const HEADER = {id: 'integrations.edit', defaultMessage: 'Edit'};
+const FOOTER = {id: 'update_outgoing_webhook.update', defaultMessage: 'Update'};
+
+export default class EditOutgoingWebhook extends React.PureComponent {
+ static propTypes = {
+
+ /**
+ * The current team
+ */
+ team: PropTypes.object.isRequired,
+
+ /**
+ * The outgoing webhook to edit
+ */
+ hook: PropTypes.object,
+
+ /**
+ * The id of the outgoing webhook to edit
+ */
+ hookId: PropTypes.string.isRequired,
+
+ /**
+ * The request state for updateOutgoingHook action. Contains status and error
+ */
+ updateOutgoingHookRequest: PropTypes.object.isRequired,
+
+ actions: PropTypes.shape({
+
+ /**
+ * The function to call to update an outgoing webhook
+ */
+ updateOutgoingHook: PropTypes.func.isRequired,
+
+ /**
+ * The function to call to get an outgoing webhook
+ */
+ getOutgoingHook: PropTypes.func.isRequired
+ }).isRequired
+ }
+
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ showConfirmModal: false,
+ serverError: ''
+ };
+ }
+
+ componentDidMount() {
+ if (window.mm_config.EnableOutgoingWebhooks === 'true') {
+ this.props.actions.getOutgoingHook(this.props.hookId);
+ }
+ }
+
+ editOutgoingHook = async (hook) => {
+ this.newHook = hook;
+
+ if (this.props.hook.id) {
+ hook.id = this.props.hook.id;
+ }
+
+ if (this.props.hook.token) {
+ hook.token = this.props.hook.token;
+ }
+
+ const triggerWordsSame = (this.props.hook.trigger_words.length === hook.trigger_words.length) &&
+ this.props.hook.trigger_words.every((v, i) => v === hook.trigger_words[i]);
+
+ const callbackUrlsSame = (this.props.hook.callback_urls.length === hook.callback_urls.length) &&
+ this.props.hook.callback_urls.every((v, i) => v === hook.callback_urls[i]);
+
+ if (this.props.hook.content_type !== hook.content_type ||
+ !triggerWordsSame || !callbackUrlsSame) {
+ this.handleConfirmModal();
+ } else {
+ await this.submitHook();
+ }
+ }
+
+ handleConfirmModal = () => {
+ this.setState({showConfirmModal: true});
+ }
+
+ confirmModalDismissed = () => {
+ this.setState({showConfirmModal: false});
+ }
+
+ submitHook = async () => {
+ this.setState({serverError: ''});
+
+ const data = await this.props.actions.updateOutgoingHook(this.newHook);
+
+ if (data) {
+ browserHistory.push(`/${this.props.team.name}/integrations/outgoing_webhooks`);
+ return;
+ }
+
+ this.setState({showConfirmModal: false});
+
+ if (this.props.updateOutgoingHookRequest.error) {
+ this.setState({serverError: this.props.updateOutgoingHookRequest.error.message});
+ }
+ }
+
+ renderExtra = () => {
+ const confirmButton = (
+ <FormattedMessage
+ id='update_outgoing_webhook.update'
+ defaultMessage='Update'
+ />
+ );
+
+ const confirmTitle = (
+ <FormattedMessage
+ id='update_outgoing_webhook.confirm'
+ defaultMessage='Edit Outgoing Webhook'
+ />
+ );
+
+ const confirmMessage = (
+ <FormattedMessage
+ id='update_outgoing_webhook.question'
+ defaultMessage='Your changes may break the existing outgoing webhook. Are you sure you would like to update it?'
+ />
+ );
+
+ return (
+ <ConfirmModal
+ title={confirmTitle}
+ message={confirmMessage}
+ confirmButtonText={confirmButton}
+ show={this.state.showConfirmModal}
+ onConfirm={this.submitHook}
+ onCancel={this.confirmModalDismissed}
+ />
+ );
+ }
+
+ render() {
+ if (!this.props.hook) {
+ return <LoadingScreen/>;
+ }
+
+ return (
+ <AbstractOutgoingWebhook
+ team={this.props.team}
+ header={HEADER}
+ footer={FOOTER}
+ renderExtra={this.renderExtra()}
+ action={this.editOutgoingHook}
+ serverError={this.state.serverError}
+ initialHook={this.props.hook}
+ />
+ );
+ }
+}
diff --git a/webapp/components/integrations/components/edit_outgoing_webhook/index.js b/webapp/components/integrations/components/edit_outgoing_webhook/index.js
new file mode 100644
index 000000000..a526ac76c
--- /dev/null
+++ b/webapp/components/integrations/components/edit_outgoing_webhook/index.js
@@ -0,0 +1,30 @@
+// Copyright (c) 2017 Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import {connect} from 'react-redux';
+import {bindActionCreators} from 'redux';
+import {updateOutgoingHook, getOutgoingHook} from 'mattermost-redux/actions/integrations';
+
+import EditOutgoingWebhook from './edit_outgoing_webhook.jsx';
+
+function mapStateToProps(state, ownProps) {
+ const hookId = ownProps.location.query.id;
+
+ return {
+ ...ownProps,
+ hookId,
+ hook: state.entities.integrations.outgoingHooks[hookId],
+ updateOutgoingHookRequest: state.requests.integrations.updateOutgoingHook
+ };
+}
+
+function mapDispatchToProps(dispatch) {
+ return {
+ actions: bindActionCreators({
+ updateOutgoingHook,
+ getOutgoingHook
+ }, dispatch)
+ };
+}
+
+export default connect(mapStateToProps, mapDispatchToProps)(EditOutgoingWebhook);
diff --git a/webapp/components/post_view/post_attachment.jsx b/webapp/components/post_view/post_attachment.jsx
index b7bd1ade9..d7b1ee774 100644
--- a/webapp/components/post_view/post_attachment.jsx
+++ b/webapp/components/post_view/post_attachment.jsx
@@ -55,10 +55,11 @@ export default class PostAttachment extends React.PureComponent {
toggleCollapseState(e) {
e.preventDefault();
-
- this.setState({
- text: this.state.collapsed ? this.state.uncollapsedText : this.state.collapsedText,
- collapsed: !this.state.collapsed
+ this.setState((prevState) => {
+ return {
+ text: prevState.collapsed ? prevState.uncollapsedText : prevState.collapsedText,
+ collapsed: !prevState.collapsed
+ };
});
}
diff --git a/webapp/components/post_view/post_body_additional_content.jsx b/webapp/components/post_view/post_body_additional_content.jsx
index 485e63967..be9e37827 100644
--- a/webapp/components/post_view/post_body_additional_content.jsx
+++ b/webapp/components/post_view/post_body_additional_content.jsx
@@ -55,14 +55,18 @@ export default class PostBodyAdditionalContent extends React.PureComponent {
}
componentWillReceiveProps(nextProps) {
- this.setState({
- embedVisible: nextProps.previewCollapsed.startsWith('false'),
- link: Utils.extractFirstLink(nextProps.post.message)
- });
+ if (nextProps.previewCollapsed !== this.props.previewCollapsed || nextProps.post.message !== this.props.post.message) {
+ this.setState({
+ embedVisible: nextProps.previewCollapsed.startsWith('false'),
+ link: Utils.extractFirstLink(nextProps.post.message)
+ });
+ }
}
toggleEmbedVisibility() {
- this.setState({embedVisible: !this.state.embedVisible});
+ this.setState((prevState) => {
+ return {embedVisible: !prevState.embedVisible};
+ });
}
getSlackAttachment() {
diff --git a/webapp/components/setting_picture.jsx b/webapp/components/setting_picture.jsx
index faa463cc7..ec6dfbd20 100644
--- a/webapp/components/setting_picture.jsx
+++ b/webapp/components/setting_picture.jsx
@@ -5,6 +5,7 @@ import PropTypes from 'prop-types';
import React, {Component} from 'react';
import {FormattedMessage} from 'react-intl';
+import exif2css from 'exif2css';
import FormError from 'components/form_error.jsx';
import loadingGif from 'images/load.gif';
@@ -41,26 +42,89 @@ export default class SettingPicture extends Component {
}
}
+ componentWillUnmount() {
+ if (this.previewBlob) {
+ URL.revokeObjectURL(this.previewBlob);
+ }
+ }
+
setPicture = (file) => {
if (file) {
- var reader = new FileReader();
+ this.previewBlob = URL.createObjectURL(file);
+ var reader = new FileReader();
reader.onload = (e) => {
+ const orientation = this.getExifOrientation(e.target.result);
+ const orientationStyles = this.getOrientationStyles(orientation);
+
this.setState({
- image: e.target.result
+ image: this.previewBlob,
+ orientationStyles
});
};
- reader.readAsDataURL(file);
+ reader.readAsArrayBuffer(file);
+ }
+ }
+
+ // based on https://stackoverflow.com/questions/7584794/accessing-jpeg-exif-rotation-data-in-javascript-on-the-client-side/32490603#32490603
+ getExifOrientation(data) {
+ var view = new DataView(data);
+
+ if (view.getUint16(0, false) !== 0xFFD8) {
+ return -2;
+ }
+
+ var length = view.byteLength;
+ var offset = 2;
+
+ while (offset < length) {
+ var marker = view.getUint16(offset, false);
+ offset += 2;
+
+ if (marker === 0xFFE1) {
+ if (view.getUint32(offset += 2, false) !== 0x45786966) {
+ return -1;
+ }
+
+ var little = view.getUint16(offset += 6, false) === 0x4949;
+ offset += view.getUint32(offset + 4, little);
+ var tags = view.getUint16(offset, little);
+ offset += 2;
+
+ for (var i = 0; i < tags; i++) {
+ if (view.getUint16(offset + (i * 12), little) === 0x0112) {
+ return view.getUint16(offset + (i * 12) + 8, little);
+ }
+ }
+ } else if ((marker & 0xFF00) === 0xFF00) {
+ offset += view.getUint16(offset, false);
+ } else {
+ break;
+ }
}
+ return -1;
+ }
+
+ getOrientationStyles(orientation) {
+ const {
+ transform,
+ 'transform-origin': transformOrigin
+ } = exif2css(orientation);
+ return {transform, transformOrigin};
}
render() {
let img;
if (this.props.file) {
+ const imageStyles = {
+ backgroundImage: 'url(' + this.state.image + ')',
+ ...this.state.orientationStyles
+ };
+
img = (
<div
className='profile-img-preview'
- style={{backgroundImage: 'url(' + this.state.image + ')'}}
+ style={imageStyles}
/>
);
} else {
diff --git a/webapp/components/sidebar.jsx b/webapp/components/sidebar.jsx
index 16c96f1b6..798ce5691 100644
--- a/webapp/components/sidebar.jsx
+++ b/webapp/components/sidebar.jsx
@@ -309,14 +309,13 @@ export default class Sidebar extends React.Component {
curIndex = i;
}
}
- let nextChannel = allChannels[curIndex];
let nextIndex = curIndex;
if (e.keyCode === Constants.KeyCodes.DOWN) {
nextIndex = curIndex + 1;
} else if (e.keyCode === Constants.KeyCodes.UP) {
nextIndex = curIndex - 1;
}
- nextChannel = allChannels[Utils.mod(nextIndex, allChannels.length)];
+ const nextChannel = allChannels[Utils.mod(nextIndex, allChannels.length)];
ChannelActions.goToChannel(nextChannel);
this.updateScrollbarOnChannelChange(nextChannel);
this.isSwitchingChannel = false;
@@ -342,7 +341,6 @@ export default class Sidebar extends React.Component {
curIndex = i;
}
}
- let nextChannel = allChannels[curIndex];
let nextIndex = curIndex;
let count = 0;
let increment = 0;
@@ -359,7 +357,7 @@ export default class Sidebar extends React.Component {
unreadCounts = ChannelStore.getUnreadCount(allChannels[nextIndex].id);
}
if (unreadCounts.msgs !== 0 || unreadCounts.mentions !== 0) {
- nextChannel = allChannels[nextIndex];
+ const nextChannel = allChannels[nextIndex];
ChannelActions.goToChannel(nextChannel);
this.updateScrollbarOnChannelChange(nextChannel);
}
diff --git a/webapp/components/signup/components/signup_email.jsx b/webapp/components/signup/components/signup_email.jsx
index 25d2c25bd..872439eda 100644
--- a/webapp/components/signup/components/signup_email.jsx
+++ b/webapp/components/signup/components/signup_email.jsx
@@ -52,9 +52,9 @@ export default class SignupEmail extends React.Component {
let teamDisplayName = '';
let teamName = '';
let teamId = '';
- let loading = true;
- let serverError = '';
- let noOpenServerError = false;
+ let loading = false;
+ const serverError = '';
+ const noOpenServerError = false;
if (hash && hash.length > 0) {
const parsedData = JSON.parse(data);
@@ -62,37 +62,40 @@ export default class SignupEmail extends React.Component {
teamDisplayName = parsedData.display_name;
teamName = parsedData.name;
teamId = parsedData.id;
- loading = false;
} else if (inviteId && inviteId.length > 0) {
loading = true;
getInviteInfo(
inviteId,
(inviteData) => {
if (!inviteData) {
+ this.setState({loading: false});
return;
}
- serverError = '';
- teamDisplayName = inviteData.display_name;
- teamName = inviteData.name;
- teamId = inviteData.id;
+ this.setState({
+ loading: false,
+ serverError: '',
+ teamDisplayName: inviteData.display_name,
+ teamName: inviteData.name,
+ teamId: inviteData.id
+ });
},
() => {
- noOpenServerError = true;
- serverError = (
- <FormattedMessage
- id='signup_user_completed.invalid_invite'
- defaultMessage='The invite link was invalid. Please speak with your Administrator to receive an invitation.'
- />
- );
+ this.setState({
+ loading: false,
+ noOpenServerError: true,
+ serverError: (
+ <FormattedMessage
+ id='signup_user_completed.invalid_invite'
+ defaultMessage='The invite link was invalid. Please speak with your Administrator to receive an invitation.'
+ />
+ )
+ });
}
);
- loading = false;
data = null;
hash = null;
- } else {
- loading = false;
}
return {
diff --git a/webapp/components/textbox.jsx b/webapp/components/textbox.jsx
index 55980d331..536b1a115 100644
--- a/webapp/components/textbox.jsx
+++ b/webapp/components/textbox.jsx
@@ -157,7 +157,9 @@ export default class Textbox extends React.Component {
showPreview(e) {
e.preventDefault();
e.target.blur();
- this.setState({preview: !this.state.preview});
+ this.setState((prevState) => {
+ return {preview: !prevState.preview};
+ });
}
hidePreview() {
diff --git a/webapp/components/webrtc/components/webrtc_sidebar.jsx b/webapp/components/webrtc/components/webrtc_sidebar.jsx
index c207ab489..82ac2d98a 100644
--- a/webapp/components/webrtc/components/webrtc_sidebar.jsx
+++ b/webapp/components/webrtc/components/webrtc_sidebar.jsx
@@ -76,7 +76,9 @@ export default class SidebarRight extends React.Component {
if (e) {
e.preventDefault();
}
- this.setState({expanded: !this.state.expanded});
+ this.setState((prevState) => {
+ return {expanded: !prevState.expanded};
+ });
}
onInitializeVideoCall(userId, isCaller) {
diff --git a/webapp/i18n/en.json b/webapp/i18n/en.json
index 87ec7486b..4a0f41a25 100755
--- a/webapp/i18n/en.json
+++ b/webapp/i18n/en.json
@@ -235,6 +235,27 @@
"admin.customization.support": "Legal and Support",
"admin.database.title": "Database Settings",
"admin.developer.title": "Developer Settings",
+ "admin.elasticsearch.title": "Elasticsearch Settings",
+ "admin.elasticsearch.noteDescription": "Changing properties in this section will require a server restart before taking effect.",
+ "admin.elasticsearch.enableIndexingTitle": "Enable Elasticsearch Indexing:",
+ "admin.elasticsearch.enableIndexingDescription": "When true, indexing of new posts occurs automatically. Search queries will use database search until \"Enable Elasticsearch for search queries\" is enabled. {documentationLink}",
+ "admin.elasticsearch.enableIndexingDescription.documentationLinkText": "Learn more about Elasticsearch in our documentation.",
+ "admin.elasticsearch.connectionUrlTitle": "Server Connection Address:",
+ "admin.elasticsearch.connectionUrlDescription": "The address of the Elasticsearch server. {documentationLink}",
+ "admin.elasticsearch.connectionUrlExample.documentationLinkText": "Please see documentation with server setup instructions.",
+ "admin.elasticsearch.usernameTitle": "Server Username:",
+ "admin.elasticsearch.usernameDescription": "(Optional) The username to authenticate to the Elasticsearch server.",
+ "admin.elasticsearch.passwordTitle": "Server Password:",
+ "admin.elasticsearch.passwordDescription": "(Optional) The password to authenticate to the Elasticsearch server.",
+ "admin.elasticsearch.sniffTitle": "Enable Cluster Sniffing:",
+ "admin.elasticsearch.sniffDescription": "When true, sniffing finds and connects to all data nodes in your cluster automatically.",
+ "admin.elasticsearch.enableSearchingTitle": "Enable Elasticsearch for search queries:",
+ "admin.elasticsearch.enableSearchingDescription": "Requires a successful connection to the Elasticsearch server. When true, Elasticsearch will be used for all search queries using the latest index. Search results may be incomplete until a bulk index of the existing post database is finished. When false, database search is used.",
+ "admin.elasticsearch.connectionUrlExample": "E.g.: \"https://elasticsearch.example.org:9200\"",
+ "admin.elasticsearch.usernameExample": "E.g.: \"elastic\"",
+ "admin.elasticsearch.password": "E.g.: \"yourpassword\"",
+ "admin.elasticsearch.testHelpText": "Tests if the Mattermost server can connect to the Elasticsearch server specified. Testing the connection does not save the configuration. See log file for more detailed error messages.",
+ "admin.elasticsearch.elasticsearch_test_button": "Test Connection",
"admin.email.agreeHPNS": " I understand and accept the Mattermost Hosted Push Notification Service <a href=\"https://about.mattermost.com/hpns-terms/\" target='_blank'>Terms of Service</a> and <a href=\"https://about.mattermost.com/hpns-privacy/\" target='_blank'>Privacy Policy</a>.",
"admin.email.allowEmailSignInDescription": "When true, Mattermost allows users to sign in using their email and password.",
"admin.email.allowEmailSignInTitle": "Enable sign-in with email: ",
diff --git a/webapp/package.json b/webapp/package.json
index 4870c5ce8..c083b80f4 100644
--- a/webapp/package.json
+++ b/webapp/package.json
@@ -11,6 +11,7 @@
"bootstrap-colorpicker": "2.5.1",
"chart.js": "2.5.0",
"compass-mixins": "0.12.10",
+ "exif2css": "1.2.0",
"fastclick": "1.0.6",
"flux": "3.1.2",
"font-awesome": "4.7.0",
diff --git a/webapp/routes/route_admin_console.jsx b/webapp/routes/route_admin_console.jsx
index b0b6ebf62..17e0290c2 100644
--- a/webapp/routes/route_admin_console.jsx
+++ b/webapp/routes/route_admin_console.jsx
@@ -45,6 +45,7 @@ import TeamAnalytics from 'components/analytics/team_analytics';
import LicenseSettings from 'components/admin_console/license_settings.jsx';
import Audits from 'components/admin_console/audits';
import Logs from 'components/admin_console/server_logs';
+import ElasticsearchSettings from 'components/admin_console/elasticsearch_settings.jsx';
export default (
<Route>
@@ -200,6 +201,10 @@ export default (
component={DatabaseSettings}
/>
<Route
+ path='elasticsearch'
+ component={ElasticsearchSettings}
+ />
+ <Route
path='developer'
component={DeveloperSettings}
/>
diff --git a/webapp/routes/route_integrations.jsx b/webapp/routes/route_integrations.jsx
index dd3ebe663..37b33ed40 100644
--- a/webapp/routes/route_integrations.jsx
+++ b/webapp/routes/route_integrations.jsx
@@ -47,13 +47,13 @@ export default {
{
path: 'add',
getComponents: (location, callback) => {
- System.import('components/integrations/components/add_outgoing_webhook.jsx').then(RouteUtils.importComponentSuccess(callback));
+ System.import('components/integrations/components/add_outgoing_webhook').then(RouteUtils.importComponentSuccess(callback));
}
},
{
path: 'edit',
getComponents: (location, callback) => {
- System.import('components/integrations/components/edit_outgoing_webhook.jsx').then(RouteUtils.importComponentSuccess(callback));
+ System.import('components/integrations/components/edit_outgoing_webhook').then(RouteUtils.importComponentSuccess(callback));
}
}
]
diff --git a/webapp/tests/components/integrations/__snapshots__/add_outgoing_hook.test.jsx.snap b/webapp/tests/components/integrations/__snapshots__/add_outgoing_hook.test.jsx.snap
new file mode 100644
index 000000000..a55f5db5e
--- /dev/null
+++ b/webapp/tests/components/integrations/__snapshots__/add_outgoing_hook.test.jsx.snap
@@ -0,0 +1,27 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`components/integrations/AddOutgoingWebhook should match snapshot 1`] = `
+<AbstractOutgoingWebhook
+ action={[Function]}
+ footer={
+ Object {
+ "defaultMessage": "Save",
+ "id": "add_outgoing_webhook.save",
+ }
+ }
+ header={
+ Object {
+ "defaultMessage": "Add",
+ "id": "integrations.add",
+ }
+ }
+ renderExtra=""
+ serverError=""
+ team={
+ Object {
+ "id": "testteamid",
+ "name": "test",
+ }
+ }
+/>
+`;
diff --git a/webapp/tests/components/integrations/__snapshots__/edit_outgoing_hook.test.jsx.snap b/webapp/tests/components/integrations/__snapshots__/edit_outgoing_hook.test.jsx.snap
new file mode 100644
index 000000000..d7656b08f
--- /dev/null
+++ b/webapp/tests/components/integrations/__snapshots__/edit_outgoing_hook.test.jsx.snap
@@ -0,0 +1,7 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`components/integrations/EditOutgoingWebhook should match snapshot 1`] = `
+<LoadingScreen
+ position="relative"
+/>
+`;
diff --git a/webapp/tests/components/integrations/add_outgoing_hook.test.jsx b/webapp/tests/components/integrations/add_outgoing_hook.test.jsx
new file mode 100644
index 000000000..0c92a7c83
--- /dev/null
+++ b/webapp/tests/components/integrations/add_outgoing_hook.test.jsx
@@ -0,0 +1,29 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import React from 'react';
+import {shallow} from 'enzyme';
+
+import AddOutgoingWebhook from 'components/integrations/components/add_outgoing_webhook/add_outgoing_webhook.jsx';
+
+describe('components/integrations/AddOutgoingWebhook', () => {
+ test('should match snapshot', () => {
+ function emptyFunction() {} //eslint-disable-line no-empty-function
+ const teamId = 'testteamid';
+
+ const wrapper = shallow(
+ <AddOutgoingWebhook
+ team={{
+ id: teamId,
+ name: 'test'
+ }}
+ createOutgoingHookRequest={{
+ status: 'not_started',
+ error: null
+ }}
+ actions={{createOutgoingHook: emptyFunction}}
+ />
+ );
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/webapp/tests/components/integrations/edit_outgoing_hook.test.jsx b/webapp/tests/components/integrations/edit_outgoing_hook.test.jsx
new file mode 100644
index 000000000..c2a5020a6
--- /dev/null
+++ b/webapp/tests/components/integrations/edit_outgoing_hook.test.jsx
@@ -0,0 +1,31 @@
+// Copyright (c) 2017-present Mattermost, Inc. All Rights Reserved.
+// See License.txt for license information.
+
+import React from 'react';
+import {shallow} from 'enzyme';
+
+import EditOutgoingWebhook from 'components/integrations/components/edit_outgoing_webhook/edit_outgoing_webhook.jsx';
+
+describe('components/integrations/EditOutgoingWebhook', () => {
+ test('should match snapshot', () => {
+ function emptyFunction() {} //eslint-disable-line no-empty-function
+ const teamId = 'testteamid';
+
+ const wrapper = shallow(
+ <EditOutgoingWebhook
+ team={{
+ id: teamId,
+ name: 'test'
+ }}
+ hookId={'somehookid'}
+ updateOutgoingHookRequest={{
+ status: 'not_started',
+ error: null
+ }}
+ actions={{updateOutgoingHook: emptyFunction, getOutgoingHook: emptyFunction}}
+ />
+ );
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
diff --git a/webapp/utils/utils.jsx b/webapp/utils/utils.jsx
index 83e538605..b14bdaf11 100644
--- a/webapp/utils/utils.jsx
+++ b/webapp/utils/utils.jsx
@@ -656,14 +656,14 @@ export function applyTheme(theme) {
changeCss('@media(min-width: 768px){.app__body .post:hover, .app__body .more-modal__list .more-modal__row:hover, .app__body .modal .settings-modal .settings-table .settings-content .section-min:hover', 'background:' + changeOpacity(theme.centerChannelColor, 0.08));
changeCss('.app__body .more-modal__row.more-modal__row--selected, .app__body .date-separator.hovered--before:after, .app__body .date-separator.hovered--after:before, .app__body .new-separator.hovered--after:before, .app__body .new-separator.hovered--before:after', 'background:' + changeOpacity(theme.centerChannelColor, 0.07));
changeCss('@media(min-width: 768px){.app__body .suggestion-list__content .command:hover, .app__body .mentions__name:hover, .app__body .dropdown-menu>li>a:focus, .app__body .dropdown-menu>li>a:hover', 'background:' + changeOpacity(theme.centerChannelColor, 0.15));
- changeCss('.app__body .suggestion--selected, .app__body .emoticon-suggestion:hover, .app__body .bot-indicator', 'background:' + changeOpacity(theme.centerChannelColor, 0.15), 1);
+ changeCss('.app__body .suggestion--selected, .app__body .emoticon-suggestion:hover, .app__body .bot-indicator', 'background:' + changeOpacity(theme.centerChannelColor, 0.15));
changeCss('code, .app__body .form-control[disabled], .app__body .form-control[readonly], .app__body fieldset[disabled] .form-control', 'background:' + changeOpacity(theme.centerChannelColor, 0.1));
changeCss('@media(min-width: 960px){.app__body .post.current--user:hover .post__body ', 'background: none;');
changeCss('.app__body .sidebar--right', 'color:' + theme.centerChannelColor);
changeCss('.app__body .search-help-popover .search-autocomplete__item:hover, .app__body .modal .settings-modal .settings-table .settings-content .appearance-section .theme-elements__body', 'background:' + changeOpacity(theme.centerChannelColor, 0.05));
changeCss('.app__body .search-help-popover .search-autocomplete__item.selected', 'background:' + changeOpacity(theme.centerChannelColor, 0.15));
if (!UserAgent.isFirefox() && !UserAgent.isInternetExplorer() && !UserAgent.isEdge()) {
- changeCss('body.app__body ::-webkit-scrollbar-thumb', 'background:' + changeOpacity(theme.centerChannelColor, 0.4), 1);
+ changeCss('body.app__body ::-webkit-scrollbar-thumb', 'background:' + changeOpacity(theme.centerChannelColor, 0.4));
}
changeCss('body', 'scrollbar-arrow-color:' + theme.centerChannelColor);
changeCss('.app__body .post-create__container .post-create-body .btn-file svg, .app__body .post.post--compact .post-image__column .post-image__details svg, .app__body .modal .about-modal .about-modal__logo svg, .app__body .post .post__img svg', 'fill:' + theme.centerChannelColor);
diff --git a/webapp/yarn.lock b/webapp/yarn.lock
index 7583c4312..a311ccaa0 100644
--- a/webapp/yarn.lock
+++ b/webapp/yarn.lock
@@ -2720,6 +2720,10 @@ executable@^1.0.0:
dependencies:
meow "^3.1.0"

+exif2css@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/exif2css/-/exif2css-1.2.0.tgz#8438e116921508e3dcc30cbe2407b1d5535e1b45"
+
exit-hook@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8"