From 42806ae965b861955235698f247df862fd655d09 Mon Sep 17 00:00:00 2001 From: Dmitry Samuylov Date: Tue, 28 Aug 2018 13:09:32 -0400 Subject: Feature/search after before on (#9219) * initial implementation of after, before, on search flags allowing to restrict the search to a specific day or a date range * missed setting beforeDate in SearchParams in one place * fixed condition when only flags are used for search without any plain terms * changed date format used for after/before/on flags to be in ISO8601 format as suggested in PR comments, added a helper function to pad month and day with zeroes allowing the user to use either format, with or without leading zeroes * corrected expected compare to date setting for the TestParseDateFilterToTimeISO8601 test * fixed a bug for the scenario when you only have the date flags without any terms, added a couple of tests for that scenario * updated the date filter logic to use parameters to construct the query instead of simply appending strings together, as suggested in the pull request comments * added search unit test using date flags * added a helper function to create a test post with a createat date manually set, updated the test for search using date flags to create test posts with different createat dates to be able to better test the functionality * MM-11817 Add support for after/before/on search flags with Elasticsearch * add support to search posts to perform the search in context of the client's timezone when filtering by createat date using on: after: before: flags * updated tests to match the new signature --- api4/apitestlib.go | 11 +++++ api4/post.go | 8 +++- api4/post_test.go | 84 +++++++++++++++++++++++++++++++++- app/post.go | 4 +- model/client4.go | 12 ++++- model/post.go | 26 +++++++++++ model/search_params.go | 73 +++++++++++++++++++++++------ model/search_params_test.go | 106 +++++++++++++++++++++++++++++++++++++------ model/utils.go | 40 ++++++++++++++++ model/utils_test.go | 35 ++++++++++++++ 
store/sqlstore/post_store.go | 38 +++++++++++++++- 11 files changed, 401 insertions(+), 36 deletions(-) diff --git a/api4/apitestlib.go b/api4/apitestlib.go index 652a8f882..6a717faf1 100644 --- a/api4/apitestlib.go +++ b/api4/apitestlib.go @@ -444,6 +444,17 @@ func (me *TestHelper) CreateMessagePostWithClient(client *model.Client4, channel return rpost } +func (me *TestHelper) CreateMessagePostNoClient(channel *model.Channel, message string, createAtTime int64) *model.Post { + post := store.Must(me.App.Srv.Store.Post().Save(&model.Post{ + UserId: me.BasicUser.Id, + ChannelId: channel.Id, + Message: message, + CreateAt: createAtTime, + })).(*model.Post) + + return post +} + func (me *TestHelper) LoginBasic() { me.LoginBasicWithClient(me.Client) } diff --git a/api4/post.go b/api4/post.go index b4edc5124..014174b37 100644 --- a/api4/post.go +++ b/api4/post.go @@ -333,17 +333,23 @@ func searchPosts(c *Context, w http.ResponseWriter, r *http.Request) { includeDeletedChannels := r.URL.Query().Get("include_deleted_channels") == "true" props := model.StringInterfaceFromJson(r.Body) + terms, ok := props["terms"].(string) if !ok || len(terms) == 0 { c.SetInvalidParam("terms") return } + timeZoneOffset, ok := props["time_zone_offset"].(float64) + if !ok { + timeZoneOffset = 0 + } + isOrSearch, _ := props["is_or_search"].(bool) startTime := time.Now() - results, err := c.App.SearchPostsInTeam(terms, c.Session.UserId, c.Params.TeamId, isOrSearch, includeDeletedChannels) + results, err := c.App.SearchPostsInTeam(terms, c.Session.UserId, c.Params.TeamId, isOrSearch, includeDeletedChannels, int(timeZoneOffset)) elapsedTime := float64(time.Since(startTime)) / float64(time.Second) metrics := c.App.Metrics diff --git a/api4/post_test.go b/api4/post_test.go index 910443fef..5f8cb5b31 100644 --- a/api4/post_test.go +++ b/api4/post_test.go @@ -17,6 +17,7 @@ import ( "github.com/mattermost/mattermost-server/app" "github.com/mattermost/mattermost-server/model" + 
"github.com/mattermost/mattermost-server/utils" ) func TestCreatePost(t *testing.T) { @@ -1302,7 +1303,21 @@ func TestSearchPosts(t *testing.T) { _ = th.CreateMessagePostWithClient(th.Client, archivedChannel, "#hashtag for post3") th.Client.DeleteChannel(archivedChannel.Id) - posts, resp := Client.SearchPosts(th.BasicTeam.Id, "search", false) + terms := "search" + isOrSearch := false + timezoneOffset := 5 + searchParams := model.SearchParameter{ + Terms: &terms, + IsOrSearch: &isOrSearch, + TimeZoneOffset: &timezoneOffset, + } + posts, resp := Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) + CheckNoError(t, resp) + if len(posts.Order) != 3 { + t.Fatal("wrong search") + } + + posts, resp = Client.SearchPosts(th.BasicTeam.Id, "search", false) CheckNoError(t, resp) if len(posts.Order) != 3 { t.Fatal("wrong search") @@ -1329,7 +1344,7 @@ func TestSearchPosts(t *testing.T) { th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.ExperimentalViewArchivedChannels = false }) - + posts, resp = Client.SearchPostsIncludeDeletedChannels(th.BasicTeam.Id, "#hashtag", false) CheckNoError(t, resp) if len(posts.Order) != 1 { @@ -1516,6 +1531,71 @@ func TestSearchPostsFromUser(t *testing.T) { } } +func TestSearchPostsWithDateFlags(t *testing.T) { + th := Setup().InitBasic() + defer th.TearDown() + th.LoginBasic() + Client := th.Client + + message := "sgtitlereview\n with return" + createDate := time.Date(2018, 8, 1, 5, 0, 0, 0, time.UTC) + _ = th.CreateMessagePostNoClient(th.BasicChannel, message, utils.MillisFromTime(createDate)) + + message = "other message with no return" + createDate = time.Date(2018, 8, 2, 5, 0, 0, 0, time.UTC) + _ = th.CreateMessagePostNoClient(th.BasicChannel, message, utils.MillisFromTime(createDate)) + + message = "other message with no return" + createDate = time.Date(2018, 8, 3, 5, 0, 0, 0, time.UTC) + _ = th.CreateMessagePostNoClient(th.BasicChannel, message, utils.MillisFromTime(createDate)) + + posts, _ := 
Client.SearchPosts(th.BasicTeam.Id, "return", false) + if len(posts.Order) != 3 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "on:", false) + if len(posts.Order) != 0 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "after:", false) + if len(posts.Order) != 0 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "before:", false) + if len(posts.Order) != 0 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "on:2018-08-01", false) + if len(posts.Order) != 1 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "after:2018-08-01", false) + resultCount := 0 + for _, post := range posts.Posts { + if post.UserId == th.BasicUser.Id { + resultCount = resultCount + 1 + } + } + if resultCount != 3 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "before:2018-08-02", false) + if len(posts.Order) != 2 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } + + posts, _ = Client.SearchPosts(th.BasicTeam.Id, "before:2018-08-03 after:2018-08-02", false) + if len(posts.Order) != 2 { + t.Fatalf("wrong number of posts returned %v", len(posts.Order)) + } +} + func TestGetFileInfosForPost(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() diff --git a/app/post.go b/app/post.go index fb0ef3f78..312269772 100644 --- a/app/post.go +++ b/app/post.go @@ -646,8 +646,8 @@ func (a *App) DeletePostFiles(post *model.Post) { } } -func (a *App) SearchPostsInTeam(terms string, userId string, teamId string, isOrSearch bool, includeDeletedChannels bool) (*model.PostSearchResults, *model.AppError) { - paramsList := 
model.ParseSearchParams(terms) +func (a *App) SearchPostsInTeam(terms string, userId string, teamId string, isOrSearch bool, includeDeletedChannels bool, timeZoneOffset int) (*model.PostSearchResults, *model.AppError) { + paramsList := model.ParseSearchParams(terms, timeZoneOffset) includeDeleted := includeDeletedChannels && *a.Config().TeamSettings.ExperimentalViewArchivedChannels esInterface := a.Elasticsearch diff --git a/model/client4.go b/model/client4.go index 4f651b976..47d227742 100644 --- a/model/client4.go +++ b/model/client4.go @@ -2140,8 +2140,16 @@ func (c *Client4) GetPostsBefore(channelId, postId string, page, perPage int, et // SearchPosts returns any posts with matching terms string. func (c *Client4) SearchPosts(teamId string, terms string, isOrSearch bool) (*PostList, *Response) { - requestBody := map[string]interface{}{"terms": terms, "is_or_search": isOrSearch} - if r, err := c.DoApiPost(c.GetTeamRoute(teamId)+"/posts/search", StringInterfaceToJson(requestBody)); err != nil { + params := SearchParameter{ + Terms: &terms, + IsOrSearch: &isOrSearch, + } + return c.SearchPostsWithParams(teamId, ¶ms) +} + +// SearchPosts returns any posts with matching terms string. 
+func (c *Client4) SearchPostsWithParams(teamId string, params *SearchParameter) (*PostList, *Response) { + if r, err := c.DoApiPost(c.GetTeamRoute(teamId)+"/posts/search", params.SearchParameterToJson()); err != nil { return nil, BuildErrorResponse(r, err) } else { defer closeBody(r) diff --git a/model/post.go b/model/post.go index 82cc0a0e9..d903156eb 100644 --- a/model/post.go +++ b/model/post.go @@ -96,6 +96,12 @@ type PostPatch struct { HasReactions *bool `json:"has_reactions"` } +type SearchParameter struct { + Terms *string `json:"terms"` + IsOrSearch *bool `json:"is_or_search"` + TimeZoneOffset *int `json:"time_zone_offset"` +} + func (o *PostPatch) WithRewrittenImageURLs(f func(string) string) *PostPatch { copy := *o if copy.Message != nil { @@ -359,6 +365,26 @@ func PostPatchFromJson(data io.Reader) *PostPatch { return &post } +func (o *SearchParameter) SearchParameterToJson() string { + b, err := json.Marshal(o) + if err != nil { + return "" + } + + return string(b) +} + +func SearchParameterFromJson(data io.Reader) *SearchParameter { + decoder := json.NewDecoder(data) + var searchParam SearchParameter + err := decoder.Decode(&searchParam) + if err != nil { + return nil + } + + return &searchParam +} + func (o *Post) ChannelMentions() []string { return ChannelMentions(o.Message) } diff --git a/model/search_params.go b/model/search_params.go index 21aa69a0d..8ed1fccfb 100644 --- a/model/search_params.go +++ b/model/search_params.go @@ -16,11 +16,33 @@ type SearchParams struct { IsHashtag bool InChannels []string FromUsers []string + AfterDate string + BeforeDate string + OnDate string OrTerms bool IncludeDeletedChannels bool + TimeZoneOffset int } -var searchFlags = [...]string{"from", "channel", "in"} +// Returns the epoch timestamp of the start of the day specified by SearchParams.AfterDate +func (p *SearchParams) GetAfterDateMillis() int64 { + date := ParseDateFilterToTime(p.AfterDate) + return GetStartOfDayMillis(date, p.TimeZoneOffset) +} + +// 
Returns the epoch timestamp of the end of the day specified by SearchParams.BeforeDate +func (p *SearchParams) GetBeforeDateMillis() int64 { + date := ParseDateFilterToTime(p.BeforeDate) + return GetEndOfDayMillis(date, p.TimeZoneOffset) +} + +// Returns the epoch timestamps of the start and end of the day specified by SearchParams.OnDate +func (p *SearchParams) GetOnDateMillis() (int64, int64) { + date := ParseDateFilterToTime(p.OnDate) + return GetStartOfDayMillis(date, p.TimeZoneOffset), GetEndOfDayMillis(date, p.TimeZoneOffset) +} + +var searchFlags = [...]string{"from", "channel", "in", "before", "after", "on"} func splitWords(text string) []string { words := []string{} @@ -101,7 +123,7 @@ func parseSearchFlags(input []string) ([]string, [][2]string) { return words, flags } -func ParseSearchParams(text string) []*SearchParams { +func ParseSearchParams(text string, timeZoneOffset int) []*SearchParams { words, flags := parseSearchFlags(splitWords(text)) hashtagTermList := []string{} @@ -120,6 +142,9 @@ func ParseSearchParams(text string) []*SearchParams { inChannels := []string{} fromUsers := []string{} + afterDate := "" + beforeDate := "" + onDate := "" for _, flagPair := range flags { flag := flagPair[0] @@ -129,6 +154,12 @@ func ParseSearchParams(text string) []*SearchParams { inChannels = append(inChannels, value) } else if flag == "from" { fromUsers = append(fromUsers, value) + } else if flag == "after" { + afterDate = value + } else if flag == "before" { + beforeDate = value + } else if flag == "on" { + onDate = value } } @@ -136,29 +167,41 @@ func ParseSearchParams(text string) []*SearchParams { if len(plainTerms) > 0 { paramsList = append(paramsList, &SearchParams{ - Terms: plainTerms, - IsHashtag: false, - InChannels: inChannels, - FromUsers: fromUsers, + Terms: plainTerms, + IsHashtag: false, + InChannels: inChannels, + FromUsers: fromUsers, + AfterDate: afterDate, + BeforeDate: beforeDate, + OnDate: onDate, + TimeZoneOffset: timeZoneOffset, }) } if 
len(hashtagTerms) > 0 { paramsList = append(paramsList, &SearchParams{ - Terms: hashtagTerms, - IsHashtag: true, - InChannels: inChannels, - FromUsers: fromUsers, + Terms: hashtagTerms, + IsHashtag: true, + InChannels: inChannels, + FromUsers: fromUsers, + AfterDate: afterDate, + BeforeDate: beforeDate, + OnDate: onDate, + TimeZoneOffset: timeZoneOffset, }) } // special case for when no terms are specified but we still have a filter - if len(plainTerms) == 0 && len(hashtagTerms) == 0 && (len(inChannels) != 0 || len(fromUsers) != 0) { + if len(plainTerms) == 0 && len(hashtagTerms) == 0 && (len(inChannels) != 0 || len(fromUsers) != 0 || len(afterDate) != 0 || len(beforeDate) != 0 || len(onDate) != 0) { paramsList = append(paramsList, &SearchParams{ - Terms: "", - IsHashtag: false, - InChannels: inChannels, - FromUsers: fromUsers, + Terms: "", + IsHashtag: false, + InChannels: inChannels, + FromUsers: fromUsers, + AfterDate: afterDate, + BeforeDate: beforeDate, + OnDate: onDate, + TimeZoneOffset: timeZoneOffset, }) } diff --git a/model/search_params_test.go b/model/search_params_test.go index 3de4e0b52..49480352e 100644 --- a/model/search_params_test.go +++ b/model/search_params_test.go @@ -166,58 +166,138 @@ func TestParseSearchFlags(t *testing.T) { } else if len(flags) != 2 || flags[0][0] != "in" || flags[0][1] != "here" || flags[1][0] != "from" || flags[1][1] != "someone" { t.Fatalf("got incorrect flags %v", flags) } + + if words, flags := parseSearchFlags(splitWords("after:2018-1-1")); len(words) != 0 { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana after:2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" { + 
t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana before:2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana after:2018-1-1 before:2018-1-10")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("#apple #banana after:2018-1-1")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("#apple #banana before:2018-1-1")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("#apple #banana after:2018-1-1 before:2018-1-10")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + 
t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana before: 2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1 before: 2018-1-10")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1 before: 2018-1-10 #fruit")); len(words) != 3 || words[0] != "apple" || words[1] != "banana" || words[2] != "#fruit" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" { + t.Fatalf("got incorrect flags %v", flags) + } + + if words, flags := parseSearchFlags(splitWords("test after:2018-7-1")); len(words) != 1 || words[0] != "test" { + t.Fatalf("got incorrect words %v", words) + } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-7-1" { + t.Fatalf("got incorrect flags %v", flags) + } } func TestParseSearchParams(t *testing.T) { - if sp := ParseSearchParams(""); len(sp) != 0 { + if sp := ParseSearchParams("", 0); len(sp) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams(" "); len(sp) != 0 { + if sp := ParseSearchParams(" ", 0); len(sp) != 0 { t.Fatalf("Incorrect output from parse search params: 
%v", sp) } - if sp := ParseSearchParams("words words"); len(sp) != 1 || sp[0].Terms != "words words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("words words", 0); len(sp) != 1 || sp[0].Terms != "words words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("\"my stuff\""); len(sp) != 1 || sp[0].Terms != "\"my stuff\"" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("\"my stuff\"", 0); len(sp) != 1 || sp[0].Terms != "\"my stuff\"" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("#words #words"); len(sp) != 1 || sp[0].Terms != "#words #words" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("#words #words", 0); len(sp) != 1 || sp[0].Terms != "#words #words" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("#words words"); len(sp) != 2 || sp[1].Terms != "#words" || !sp[1].IsHashtag || len(sp[1].InChannels) != 0 || len(sp[1].FromUsers) != 0 || sp[0].Terms != "words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 { + if sp := ParseSearchParams("#words words", 0); len(sp) != 2 || sp[1].Terms != "#words" || !sp[1].IsHashtag || len(sp[1].InChannels) != 0 || len(sp[1].FromUsers) != 0 || sp[0].Terms != "words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("in:channel"); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { + if sp := 
ParseSearchParams("in:channel", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("testing in:channel"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("testing in:channel", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("in:channel testing"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("in:channel testing", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("in:channel in:otherchannel"); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 2 || sp[0].InChannels[0] != "channel" || sp[0].InChannels[1] != "otherchannel" || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("in:channel in:otherchannel", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 2 || sp[0].InChannels[0] != "channel" || sp[0].InChannels[1] != "otherchannel" || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp) } - if sp := ParseSearchParams("testing in:channel from:someone"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 1 || sp[0].FromUsers[0] != "someone" { + if sp := ParseSearchParams("testing in:channel from:someone", 0); len(sp) != 1 || sp[0].Terms != 
"testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 1 || sp[0].FromUsers[0] != "someone" { t.Fatalf("Incorrect output from parse search params: %v", sp[0]) } - if sp := ParseSearchParams("##hashtag +#plus+"); len(sp) != 1 || sp[0].Terms != "#hashtag #plus" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("##hashtag +#plus+", 0); len(sp) != 1 || sp[0].Terms != "#hashtag #plus" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp[0]) } - if sp := ParseSearchParams("wildcar*"); len(sp) != 1 || sp[0].Terms != "wildcar*" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { + if sp := ParseSearchParams("wildcar*", 0); len(sp) != 1 || sp[0].Terms != "wildcar*" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 { t.Fatalf("Incorrect output from parse search params: %v", sp[0]) } + + if sp := ParseSearchParams("after:2018-8-1 testing", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].AfterDate) == 0 || sp[0].AfterDate != "2018-8-1" { + t.Fatalf("Incorrect output from parse search params: %v", sp) + } + + if sp := ParseSearchParams("after:2018-8-1", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].AfterDate) == 0 || sp[0].AfterDate != "2018-8-1" { + t.Fatalf("Incorrect output from parse search params: %v", sp) + } } diff --git a/model/utils.go b/model/utils.go index 574f43e06..7002070d7 100644 --- a/model/utils.go +++ b/model/utils.go @@ -148,6 +148,46 @@ func GetMillis() int64 { return time.Now().UnixNano() / int64(time.Millisecond) } +// GetMillisForTime is a convience method to get milliseconds since epoch for provided Time. 
+func GetMillisForTime(thisTime time.Time) int64 { + return thisTime.UnixNano() / int64(time.Millisecond) +} + +// ParseDateFilterToTime is a convenience method to get Time from string +func ParseDateFilterToTime(filterString string) time.Time { + resultTime, err := time.Parse("2006-01-02", PadDateStringZeros(filterString)) + if err != nil { + return time.Now() + } + return resultTime +} + +// PadDateStringZeros is a convenience method to pad 2 digit date parts with zeros to meet ISO 8601 format +func PadDateStringZeros(dateString string) string { + parts := strings.Split(dateString, "-") + for index, part := range parts { + if len(part) == 1 { + parts[index] = "0" + part + } + } + dateString = strings.Join(parts[:], "-") + return dateString +} + +// GetStartOfDayMillis is a convenience method to get milliseconds since epoch for provided date's start of day +func GetStartOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 { + localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset) + resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 0, 0, 0, 0, localSearchTimeZone) + return GetMillisForTime(resultTime) +} + +// GetEndOfDayMillis is a convenience method to get milliseconds since epoch for provided date's end of day +func GetEndOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 { + localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset) + resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 23, 59, 59, 999999999, localSearchTimeZone) + return GetMillisForTime(resultTime) +} + func CopyStringMap(originalMap map[string]string) map[string]string { copyMap := make(map[string]string) for k, v := range originalMap { copyMap[k] = v } return copyMap } diff --git a/model/utils_test.go b/model/utils_test.go index d35146b30..f004fc216 100644 --- a/model/utils_test.go +++ b/model/utils_test.go @@ -4,9 +4,11 @@ package model import ( + "fmt" "net/http" "strings" "testing" + "time" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -30,6 +32,39 @@ func TestRandomString(t *testing.T) { } } +func TestGetMillisForTime(t *testing.T) { + thisTimeMillis := int64(1471219200000) + thisTime := time.Date(2016, time.August, 15, 0, 0, 0, 0, time.UTC) + + result := GetMillisForTime(thisTime) + + if thisTimeMillis != result { + t.Fatalf(fmt.Sprintf("millis are not the same: %d and %d", thisTimeMillis, result)) + } +} + +func TestParseDateFilterToTimeISO8601(t *testing.T) { + testString := "2016-08-01" + compareTime := time.Date(2016, time.August, 1, 0, 0, 0, 0, time.UTC) + + result := ParseDateFilterToTime(testString) + + if result != compareTime { + t.Fatalf(fmt.Sprintf("parsed date doesn't match the expected result: parsed result %v and expected time %v", result, compareTime)) + } +} + +func TestParseDateFilterToTimeNeedZeroPadding(t *testing.T) { + testString := "2016-8-1" + compareTime := time.Date(2016, time.August, 1, 0, 0, 0, 0, time.UTC) + + result := ParseDateFilterToTime(testString) + + if result != compareTime { + t.Fatalf(fmt.Sprintf("parsed date doesn't match the expected result: parsed result %v and expected time %v", result, compareTime)) + } +} + func TestAppError(t *testing.T) { err := NewAppError("TestAppError", "message", nil, "", http.StatusInternalServerError) json := err.ToJson() diff --git a/store/sqlstore/post_store.go b/store/sqlstore/post_store.go index 90db80796..14a6039bc 100644 --- a/store/sqlstore/post_store.go +++ b/store/sqlstore/post_store.go @@ -783,7 +783,7 @@ func (s *SqlPostStore) Search(teamId string, userId string, params *model.Search termMap := map[string]bool{} terms := params.Terms - if terms == "" && len(params.InChannels) == 0 && len(params.FromUsers) == 0 { + if terms == "" && len(params.InChannels) == 0 && len(params.FromUsers) == 0 && len(params.OnDate) == 0 && len(params.AfterDate) == 0 && len(params.BeforeDate) == 0 { result.Data = []*model.Post{} return } @@ -829,6 +829,7 @@ func 
(s *SqlPostStore) Search(teamId string, userId string, params *model.Search AND UserId = :UserId ` + deletedQueryPart + ` CHANNEL_FILTER) + CREATEDATE_CLAUSE SEARCH_CLAUSE ORDER BY CreateAt DESC LIMIT 100` @@ -889,6 +890,41 @@ func (s *SqlPostStore) Search(teamId string, userId string, params *model.Search searchQuery = strings.Replace(searchQuery, "POST_FILTER", "", 1) } + // handle after: before: on: filters + if len(params.AfterDate) > 1 || len(params.BeforeDate) > 1 || len(params.OnDate) > 1 { + if len(params.OnDate) > 1 { + onDateStart, onDateEnd := params.GetOnDateMillis() + queryParams["OnDateStart"] = strconv.FormatInt(onDateStart, 10) + queryParams["OnDateEnd"] = strconv.FormatInt(onDateEnd, 10) + + // between `on date` start of day and end of day + searchQuery = strings.Replace(searchQuery, "CREATEDATE_CLAUSE", "AND CreateAt BETWEEN :OnDateStart AND :OnDateEnd ", 1) + } else if len(params.AfterDate) > 1 && len(params.BeforeDate) > 1 { + afterDate := params.GetAfterDateMillis() + beforeDate := params.GetBeforeDateMillis() + queryParams["OnDateStart"] = strconv.FormatInt(afterDate, 10) + queryParams["OnDateEnd"] = strconv.FormatInt(beforeDate, 10) + + // between clause + searchQuery = strings.Replace(searchQuery, "CREATEDATE_CLAUSE", "AND CreateAt BETWEEN :OnDateStart AND :OnDateEnd ", 1) + } else if len(params.AfterDate) > 1 { + afterDate := params.GetAfterDateMillis() + queryParams["AfterDate"] = strconv.FormatInt(afterDate, 10) + + // greater than `after date` + searchQuery = strings.Replace(searchQuery, "CREATEDATE_CLAUSE", "AND CreateAt >= :AfterDate ", 1) + } else if len(params.BeforeDate) > 1 { + beforeDate := params.GetBeforeDateMillis() + queryParams["BeforeDate"] = strconv.FormatInt(beforeDate, 10) + + // less than `before date` + searchQuery = strings.Replace(searchQuery, "CREATEDATE_CLAUSE", "AND CreateAt <= :BeforeDate ", 1) + } + } else { + // no create date filters set + searchQuery = strings.Replace(searchQuery, "CREATEDATE_CLAUSE", "", 1) + 
} + if terms == "" { // we've already confirmed that we have a channel or user to search for searchQuery = strings.Replace(searchQuery, "SEARCH_CLAUSE", "", 1) -- cgit v1.2.3-1-g7c22