summaryrefslogtreecommitdiffstats
path: root/model
diff options
context:
space:
mode:
authorDmitry Samuylov <dsamuylov@pharo.com>2018-08-28 13:09:32 -0400
committerHarrison Healey <harrisonmhealey@gmail.com>2018-08-28 13:09:32 -0400
commit42806ae965b861955235698f247df862fd655d09 (patch)
tree4a92f098976b089dc832f3f0dc5ecfc0392690ed /model
parent61e27beabc9804fdcf59ed9df2180802175a4f70 (diff)
downloadchat-42806ae965b861955235698f247df862fd655d09.tar.gz
chat-42806ae965b861955235698f247df862fd655d09.tar.bz2
chat-42806ae965b861955235698f247df862fd655d09.zip
Feature/search after before on (#9219)
* initial implementation of after, before, on search flags allowing to restrict the search to a specific day or a date range * missed setting beforeDate in SearchParams in one place * fixed condition when only flags are used for search without any plain terms * changed date format used for after/before/on flags to be in ISO8601 format as suggested in PR comments, added a helper function to pad month and day with zeroes allowing the user to use either format, with or without leading zeroes * corrected expected compare to date setting for the TestParseDateFilterToTimeISO8601 test * fixed a bug for the scenario when you only have the date flags without any terms, added a couple of tests for that scenario * updated the date filter logic to use parameters to construct the query instead of simply appending strings together, as suggested in the pull request comments * added search unit test using date flags * added a helper function to create a test post with a createat date manually set, updated the test for search using date flags to create test posts with different createat dates to be able to better test the functionality * MM-11817 Add support for after/before/on search flags with Elasticsearch * add support to search posts to perform the search in context of the client's timezone when filtering by createat date using on: after: before: flags * updated tests to match the new signature
Diffstat (limited to 'model')
-rw-r--r--model/client4.go12
-rw-r--r--model/post.go26
-rw-r--r--model/search_params.go73
-rw-r--r--model/search_params_test.go106
-rw-r--r--model/utils.go40
-rw-r--r--model/utils_test.go35
6 files changed, 262 insertions, 30 deletions
diff --git a/model/client4.go b/model/client4.go
index 4f651b976..47d227742 100644
--- a/model/client4.go
+++ b/model/client4.go
@@ -2140,8 +2140,16 @@ func (c *Client4) GetPostsBefore(channelId, postId string, page, perPage int, et
// SearchPosts returns any posts with matching terms string.
func (c *Client4) SearchPosts(teamId string, terms string, isOrSearch bool) (*PostList, *Response) {
- requestBody := map[string]interface{}{"terms": terms, "is_or_search": isOrSearch}
- if r, err := c.DoApiPost(c.GetTeamRoute(teamId)+"/posts/search", StringInterfaceToJson(requestBody)); err != nil {
+ params := SearchParameter{
+ Terms: &terms,
+ IsOrSearch: &isOrSearch,
+ }
+ return c.SearchPostsWithParams(teamId, &params)
+}
+
+// SearchPostsWithParams returns any posts with matching terms string, using the given search parameters.
+func (c *Client4) SearchPostsWithParams(teamId string, params *SearchParameter) (*PostList, *Response) {
+ if r, err := c.DoApiPost(c.GetTeamRoute(teamId)+"/posts/search", params.SearchParameterToJson()); err != nil {
return nil, BuildErrorResponse(r, err)
} else {
defer closeBody(r)
diff --git a/model/post.go b/model/post.go
index 82cc0a0e9..d903156eb 100644
--- a/model/post.go
+++ b/model/post.go
@@ -96,6 +96,12 @@ type PostPatch struct {
HasReactions *bool `json:"has_reactions"`
}
+type SearchParameter struct {
+ Terms *string `json:"terms"`
+ IsOrSearch *bool `json:"is_or_search"`
+ TimeZoneOffset *int `json:"time_zone_offset"`
+}
+
func (o *PostPatch) WithRewrittenImageURLs(f func(string) string) *PostPatch {
copy := *o
if copy.Message != nil {
@@ -359,6 +365,26 @@ func PostPatchFromJson(data io.Reader) *PostPatch {
return &post
}
+func (o *SearchParameter) SearchParameterToJson() string {
+ b, err := json.Marshal(o)
+ if err != nil {
+ return ""
+ }
+
+ return string(b)
+}
+
+func SearchParameterFromJson(data io.Reader) *SearchParameter {
+ decoder := json.NewDecoder(data)
+ var searchParam SearchParameter
+ err := decoder.Decode(&searchParam)
+ if err != nil {
+ return nil
+ }
+
+ return &searchParam
+}
+
func (o *Post) ChannelMentions() []string {
return ChannelMentions(o.Message)
}
diff --git a/model/search_params.go b/model/search_params.go
index 21aa69a0d..8ed1fccfb 100644
--- a/model/search_params.go
+++ b/model/search_params.go
@@ -16,11 +16,33 @@ type SearchParams struct {
IsHashtag bool
InChannels []string
FromUsers []string
+ AfterDate string
+ BeforeDate string
+ OnDate string
OrTerms bool
IncludeDeletedChannels bool
+ TimeZoneOffset int
}
-var searchFlags = [...]string{"from", "channel", "in"}
+// Returns the epoch timestamp of the start of the day specified by SearchParams.AfterDate
+func (p *SearchParams) GetAfterDateMillis() int64 {
+ date := ParseDateFilterToTime(p.AfterDate)
+ return GetStartOfDayMillis(date, p.TimeZoneOffset)
+}
+
+// Returns the epoch timestamp of the end of the day specified by SearchParams.BeforeDate
+func (p *SearchParams) GetBeforeDateMillis() int64 {
+ date := ParseDateFilterToTime(p.BeforeDate)
+ return GetEndOfDayMillis(date, p.TimeZoneOffset)
+}
+
+// Returns the epoch timestamps of the start and end of the day specified by SearchParams.OnDate
+func (p *SearchParams) GetOnDateMillis() (int64, int64) {
+ date := ParseDateFilterToTime(p.OnDate)
+ return GetStartOfDayMillis(date, p.TimeZoneOffset), GetEndOfDayMillis(date, p.TimeZoneOffset)
+}
+
+var searchFlags = [...]string{"from", "channel", "in", "before", "after", "on"}
func splitWords(text string) []string {
words := []string{}
@@ -101,7 +123,7 @@ func parseSearchFlags(input []string) ([]string, [][2]string) {
return words, flags
}
-func ParseSearchParams(text string) []*SearchParams {
+func ParseSearchParams(text string, timeZoneOffset int) []*SearchParams {
words, flags := parseSearchFlags(splitWords(text))
hashtagTermList := []string{}
@@ -120,6 +142,9 @@ func ParseSearchParams(text string) []*SearchParams {
inChannels := []string{}
fromUsers := []string{}
+ afterDate := ""
+ beforeDate := ""
+ onDate := ""
for _, flagPair := range flags {
flag := flagPair[0]
@@ -129,6 +154,12 @@ func ParseSearchParams(text string) []*SearchParams {
inChannels = append(inChannels, value)
} else if flag == "from" {
fromUsers = append(fromUsers, value)
+ } else if flag == "after" {
+ afterDate = value
+ } else if flag == "before" {
+ beforeDate = value
+ } else if flag == "on" {
+ onDate = value
}
}
@@ -136,29 +167,41 @@ func ParseSearchParams(text string) []*SearchParams {
if len(plainTerms) > 0 {
paramsList = append(paramsList, &SearchParams{
- Terms: plainTerms,
- IsHashtag: false,
- InChannels: inChannels,
- FromUsers: fromUsers,
+ Terms: plainTerms,
+ IsHashtag: false,
+ InChannels: inChannels,
+ FromUsers: fromUsers,
+ AfterDate: afterDate,
+ BeforeDate: beforeDate,
+ OnDate: onDate,
+ TimeZoneOffset: timeZoneOffset,
})
}
if len(hashtagTerms) > 0 {
paramsList = append(paramsList, &SearchParams{
- Terms: hashtagTerms,
- IsHashtag: true,
- InChannels: inChannels,
- FromUsers: fromUsers,
+ Terms: hashtagTerms,
+ IsHashtag: true,
+ InChannels: inChannels,
+ FromUsers: fromUsers,
+ AfterDate: afterDate,
+ BeforeDate: beforeDate,
+ OnDate: onDate,
+ TimeZoneOffset: timeZoneOffset,
})
}
// special case for when no terms are specified but we still have a filter
- if len(plainTerms) == 0 && len(hashtagTerms) == 0 && (len(inChannels) != 0 || len(fromUsers) != 0) {
+ if len(plainTerms) == 0 && len(hashtagTerms) == 0 && (len(inChannels) != 0 || len(fromUsers) != 0 || len(afterDate) != 0 || len(beforeDate) != 0 || len(onDate) != 0) {
paramsList = append(paramsList, &SearchParams{
- Terms: "",
- IsHashtag: false,
- InChannels: inChannels,
- FromUsers: fromUsers,
+ Terms: "",
+ IsHashtag: false,
+ InChannels: inChannels,
+ FromUsers: fromUsers,
+ AfterDate: afterDate,
+ BeforeDate: beforeDate,
+ OnDate: onDate,
+ TimeZoneOffset: timeZoneOffset,
})
}
diff --git a/model/search_params_test.go b/model/search_params_test.go
index 3de4e0b52..49480352e 100644
--- a/model/search_params_test.go
+++ b/model/search_params_test.go
@@ -166,58 +166,138 @@ func TestParseSearchFlags(t *testing.T) {
} else if len(flags) != 2 || flags[0][0] != "in" || flags[0][1] != "here" || flags[1][0] != "from" || flags[1][1] != "someone" {
t.Fatalf("got incorrect flags %v", flags)
}
+
+ if words, flags := parseSearchFlags(splitWords("after:2018-1-1")); len(words) != 0 {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana after:2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana before:2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana after:2018-1-1 before:2018-1-10")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("#apple #banana after:2018-1-1")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("#apple #banana before:2018-1-1")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("#apple #banana after:2018-1-1 before:2018-1-10")); len(words) != 2 || words[0] != "#apple" || words[1] != "#banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana before: 2018-1-1")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "before" || flags[0][1] != "2018-1-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1 before: 2018-1-10")); len(words) != 2 || words[0] != "apple" || words[1] != "banana" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("apple banana after: 2018-1-1 before: 2018-1-10 #fruit")); len(words) != 3 || words[0] != "apple" || words[1] != "banana" || words[2] != "#fruit" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 2 || flags[0][0] != "after" || flags[0][1] != "2018-1-1" || flags[1][0] != "before" || flags[1][1] != "2018-1-10" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
+
+ if words, flags := parseSearchFlags(splitWords("test after:2018-7-1")); len(words) != 1 || words[0] != "test" {
+ t.Fatalf("got incorrect words %v", words)
+ } else if len(flags) != 1 || flags[0][0] != "after" || flags[0][1] != "2018-7-1" {
+ t.Fatalf("got incorrect flags %v", flags)
+ }
}
func TestParseSearchParams(t *testing.T) {
- if sp := ParseSearchParams(""); len(sp) != 0 {
+ if sp := ParseSearchParams("", 0); len(sp) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams(" "); len(sp) != 0 {
+ if sp := ParseSearchParams(" ", 0); len(sp) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("words words"); len(sp) != 1 || sp[0].Terms != "words words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("words words", 0); len(sp) != 1 || sp[0].Terms != "words words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("\"my stuff\""); len(sp) != 1 || sp[0].Terms != "\"my stuff\"" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("\"my stuff\"", 0); len(sp) != 1 || sp[0].Terms != "\"my stuff\"" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("#words #words"); len(sp) != 1 || sp[0].Terms != "#words #words" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("#words #words", 0); len(sp) != 1 || sp[0].Terms != "#words #words" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("#words words"); len(sp) != 2 || sp[1].Terms != "#words" || !sp[1].IsHashtag || len(sp[1].InChannels) != 0 || len(sp[1].FromUsers) != 0 || sp[0].Terms != "words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 {
+ if sp := ParseSearchParams("#words words", 0); len(sp) != 2 || sp[1].Terms != "#words" || !sp[1].IsHashtag || len(sp[1].InChannels) != 0 || len(sp[1].FromUsers) != 0 || sp[0].Terms != "words" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("in:channel"); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("in:channel", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("testing in:channel"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("testing in:channel", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("in:channel testing"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("in:channel testing", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("in:channel in:otherchannel"); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 2 || sp[0].InChannels[0] != "channel" || sp[0].InChannels[1] != "otherchannel" || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("in:channel in:otherchannel", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].InChannels) != 2 || sp[0].InChannels[0] != "channel" || sp[0].InChannels[1] != "otherchannel" || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp)
}
- if sp := ParseSearchParams("testing in:channel from:someone"); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 1 || sp[0].FromUsers[0] != "someone" {
+ if sp := ParseSearchParams("testing in:channel from:someone", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].InChannels) != 1 || sp[0].InChannels[0] != "channel" || len(sp[0].FromUsers) != 1 || sp[0].FromUsers[0] != "someone" {
t.Fatalf("Incorrect output from parse search params: %v", sp[0])
}
- if sp := ParseSearchParams("##hashtag +#plus+"); len(sp) != 1 || sp[0].Terms != "#hashtag #plus" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("##hashtag +#plus+", 0); len(sp) != 1 || sp[0].Terms != "#hashtag #plus" || !sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp[0])
}
- if sp := ParseSearchParams("wildcar*"); len(sp) != 1 || sp[0].Terms != "wildcar*" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
+ if sp := ParseSearchParams("wildcar*", 0); len(sp) != 1 || sp[0].Terms != "wildcar*" || sp[0].IsHashtag || len(sp[0].InChannels) != 0 || len(sp[0].FromUsers) != 0 {
t.Fatalf("Incorrect output from parse search params: %v", sp[0])
}
+
+ if sp := ParseSearchParams("after:2018-8-1 testing", 0); len(sp) != 1 || sp[0].Terms != "testing" || len(sp[0].AfterDate) == 0 || sp[0].AfterDate != "2018-8-1" {
+ t.Fatalf("Incorrect output from parse search params: %v", sp)
+ }
+
+ if sp := ParseSearchParams("after:2018-8-1", 0); len(sp) != 1 || sp[0].Terms != "" || len(sp[0].AfterDate) == 0 || sp[0].AfterDate != "2018-8-1" {
+ t.Fatalf("Incorrect output from parse search params: %v", sp)
+ }
}
diff --git a/model/utils.go b/model/utils.go
index 574f43e06..7002070d7 100644
--- a/model/utils.go
+++ b/model/utils.go
@@ -148,6 +148,46 @@ func GetMillis() int64 {
return time.Now().UnixNano() / int64(time.Millisecond)
}
+// GetMillisForTime is a convenience method to get milliseconds since epoch for provided Time.
+func GetMillisForTime(thisTime time.Time) int64 {
+ return thisTime.UnixNano() / int64(time.Millisecond)
+}
+
+// ParseDateFilterToTime is a convenience method to get Time from string
+func ParseDateFilterToTime(filterString string) time.Time {
+ resultTime, err := time.Parse("2006-01-02", PadDateStringZeros(filterString))
+ if err != nil {
+ return time.Now()
+ }
+ return resultTime
+}
+
+// PadDateStringZeros is a convenience method to pad 2 digit date parts with zeros to meet ISO 8601 format
+func PadDateStringZeros(dateString string) string {
+ parts := strings.Split(dateString, "-")
+ for index, part := range parts {
+ if len(part) == 1 {
+ parts[index] = "0" + part
+ }
+ }
+ dateString = strings.Join(parts[:], "-")
+ return dateString
+}
+
+// GetStartOfDayMillis is a convenience method to get milliseconds since epoch for provided date's start of day
+func GetStartOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 {
+ localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset)
+ resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 0, 0, 0, 0, localSearchTimeZone)
+ return GetMillisForTime(resultTime)
+}
+
+// GetEndOfDayMillis is a convenience method to get milliseconds since epoch for provided date's end of day
+func GetEndOfDayMillis(thisTime time.Time, timeZoneOffset int) int64 {
+ localSearchTimeZone := time.FixedZone("Local Search Time Zone", timeZoneOffset)
+ resultTime := time.Date(thisTime.Year(), thisTime.Month(), thisTime.Day(), 23, 59, 59, 999999999, localSearchTimeZone)
+ return GetMillisForTime(resultTime)
+}
+
func CopyStringMap(originalMap map[string]string) map[string]string {
copyMap := make(map[string]string)
for k, v := range originalMap {
diff --git a/model/utils_test.go b/model/utils_test.go
index d35146b30..f004fc216 100644
--- a/model/utils_test.go
+++ b/model/utils_test.go
@@ -4,9 +4,11 @@
package model
import (
+ "fmt"
"net/http"
"strings"
"testing"
+ "time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -30,6 +32,39 @@ func TestRandomString(t *testing.T) {
}
}
+func TestGetMillisForTime(t *testing.T) {
+ thisTimeMillis := int64(1471219200000)
+ thisTime := time.Date(2016, time.August, 15, 0, 0, 0, 0, time.UTC)
+
+ result := GetMillisForTime(thisTime)
+
+ if thisTimeMillis != result {
+ t.Fatalf(fmt.Sprintf("millis are not the same: %d and %d", thisTimeMillis, result))
+ }
+}
+
+func TestParseDateFilterToTimeISO8601(t *testing.T) {
+ testString := "2016-08-01"
+ compareTime := time.Date(2016, time.August, 1, 0, 0, 0, 0, time.UTC)
+
+ result := ParseDateFilterToTime(testString)
+
+ if result != compareTime {
+ t.Fatalf(fmt.Sprintf("parsed date doesn't match the expected result: parsed result %v and expected time %v", result, compareTime))
+ }
+}
+
+func TestParseDateFilterToTimeNeedZeroPadding(t *testing.T) {
+ testString := "2016-8-1"
+ compareTime := time.Date(2016, time.August, 1, 0, 0, 0, 0, time.UTC)
+
+ result := ParseDateFilterToTime(testString)
+
+ if result != compareTime {
+ t.Fatalf(fmt.Sprintf("parsed date doesn't match the expected result: parsed result %v and expected time %v", result, compareTime))
+ }
+}
+
func TestAppError(t *testing.T) {
err := NewAppError("TestAppError", "message", nil, "", http.StatusInternalServerError)
json := err.ToJson()