From 4944980b2d29dbc01396f5520533aa5bdcb1da64 Mon Sep 17 00:00:00 2001
From: zouap
Date: Fri, 31 Dec 2021 10:21:23 +0800
Subject: [PATCH] Submit code.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: zouap
---
 models/dbsql/dataset_foreigntable_for_es.sql |   5 +-
 models/dbsql/issue_foreigntable_for_es.sql   |  10 +-
 models/dbsql/pr_foreigntable_for_es.sql      |   7 +-
 models/dbsql/repo_foreigntable_for_es.sql    |   7 +-
 models/dbsql/user_foreigntable_for_es.sql    |   8 +-
 routers/search.go                            | 105 +++++++++++++++++--
 6 files changed, 123 insertions(+), 19 deletions(-)

diff --git a/models/dbsql/dataset_foreigntable_for_es.sql b/models/dbsql/dataset_foreigntable_for_es.sql
index 1ba386b3e..f9fb40c88 100644
--- a/models/dbsql/dataset_foreigntable_for_es.sql
+++ b/models/dbsql/dataset_foreigntable_for_es.sql
@@ -102,6 +102,7 @@ CREATE TRIGGER es_insert_dataset
   AFTER INSERT ON public.dataset
   FOR EACH ROW EXECUTE PROCEDURE insert_dataset_data();
 
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_insert_dataset;
 
 CREATE OR REPLACE FUNCTION public.udpate_dataset_file_name_delete() RETURNS trigger AS
 $def$
@@ -117,6 +118,7 @@ CREATE TRIGGER es_udpate_dataset_file_name_delete
   AFTER DELETE ON public.attachment
   FOR EACH ROW EXECUTE PROCEDURE udpate_dataset_file_name_delete();
 
+ALTER TABLE public.attachment ENABLE ALWAYS TRIGGER es_udpate_dataset_file_name_delete;
 
 CREATE OR REPLACE FUNCTION public.update_dataset() RETURNS trigger AS
 $def$
@@ -139,6 +141,7 @@ CREATE TRIGGER es_update_dataset
   AFTER UPDATE ON public.dataset
   FOR EACH ROW EXECUTE PROCEDURE update_dataset();
 
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_update_dataset;
 
 CREATE OR REPLACE FUNCTION public.delete_dataset() RETURNS trigger AS
 $def$
@@ -155,4 +158,4 @@ CREATE TRIGGER es_delete_dataset
   AFTER DELETE ON public.dataset
   FOR EACH ROW EXECUTE PROCEDURE delete_dataset();
 
-
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_delete_dataset;
diff --git a/models/dbsql/issue_foreigntable_for_es.sql b/models/dbsql/issue_foreigntable_for_es.sql
index 012cf9701..1162c55ac 100644
--- a/models/dbsql/issue_foreigntable_for_es.sql
+++ b/models/dbsql/issue_foreigntable_for_es.sql
@@ -148,7 +148,7 @@ CREATE TRIGGER es_insert_issue
   AFTER INSERT ON public.issue
   FOR EACH ROW EXECUTE PROCEDURE insert_issue_data();
 
-
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_insert_issue;
 
 CREATE OR REPLACE FUNCTION public.udpate_issue_comment() RETURNS trigger AS
 $def$
@@ -169,6 +169,8 @@ CREATE TRIGGER es_udpate_issue_comment
   AFTER DELETE OR UPDATE ON public.comment
   FOR EACH ROW EXECUTE PROCEDURE udpate_issue_comment();
 
+ALTER TABLE public.comment ENABLE ALWAYS TRIGGER es_udpate_issue_comment;
+
 
 CREATE OR REPLACE FUNCTION public.update_issue() RETURNS trigger AS
 $def$
@@ -192,7 +194,7 @@ CREATE TRIGGER es_update_issue
   AFTER UPDATE ON public.issue
   FOR EACH ROW EXECUTE PROCEDURE update_issue();
 
-
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_update_issue;
 
 CREATE OR REPLACE FUNCTION public.delete_issue() RETURNS trigger AS
 $def$
@@ -207,4 +209,6 @@ LANGUAGE plpgsql;
 DROP TRIGGER IF EXISTS es_delete_issue on public.issue;
 CREATE TRIGGER es_delete_issue
   AFTER DELETE ON public.issue
-  FOR EACH ROW EXECUTE PROCEDURE delete_issue();
\ No newline at end of file
+  FOR EACH ROW EXECUTE PROCEDURE delete_issue();
+
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_delete_issue;
\ No newline at end of file
diff --git a/models/dbsql/pr_foreigntable_for_es.sql b/models/dbsql/pr_foreigntable_for_es.sql
index 6d28b7651..089f62060 100644
--- a/models/dbsql/pr_foreigntable_for_es.sql
+++ b/models/dbsql/pr_foreigntable_for_es.sql
@@ -136,7 +136,7 @@ CREATE TRIGGER es_insert_pull_request
   AFTER INSERT ON public.pull_request
   FOR EACH ROW EXECUTE PROCEDURE insert_pull_request_data();
 
-
+ALTER TABLE public.pull_request ENABLE ALWAYS TRIGGER es_insert_pull_request;
 
 CREATE OR REPLACE FUNCTION public.update_pull_request() RETURNS trigger AS
 $def$
@@ -156,6 +156,7 @@ CREATE TRIGGER es_update_pull_request
   AFTER UPDATE ON public.pull_request
   FOR EACH ROW EXECUTE PROCEDURE update_pull_request();
 
+ALTER TABLE public.pull_request ENABLE ALWAYS TRIGGER es_update_pull_request;
 
 CREATE OR REPLACE FUNCTION public.delete_pull_request() RETURNS trigger AS
 $def$
@@ -170,4 +171,6 @@ LANGUAGE plpgsql;
 DROP TRIGGER IF EXISTS es_delete_pull_request on public.pull_request;
 CREATE TRIGGER es_delete_pull_request
   AFTER DELETE ON public.pull_request
-  FOR EACH ROW EXECUTE PROCEDURE delete_pull_request();
\ No newline at end of file
+  FOR EACH ROW EXECUTE PROCEDURE delete_pull_request();
+
+ALTER TABLE public.pull_request ENABLE ALWAYS TRIGGER es_delete_pull_request;
\ No newline at end of file
diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql
index 094fd4e75..ae6fe51b3 100644
--- a/models/dbsql/repo_foreigntable_for_es.sql
+++ b/models/dbsql/repo_foreigntable_for_es.sql
@@ -241,6 +241,7 @@ CREATE TRIGGER es_insert_repository
   AFTER INSERT ON public.repository
   FOR EACH ROW EXECUTE PROCEDURE insert_repository_data();
 
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_insert_repository;
 
 CREATE OR REPLACE FUNCTION public.update_repository() RETURNS trigger AS
 $def$
@@ -274,6 +275,7 @@ CREATE TRIGGER es_update_repository
   AFTER UPDATE ON public.repository
   FOR EACH ROW EXECUTE PROCEDURE update_repository();
 
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_update_repository;
 
 CREATE OR REPLACE FUNCTION public.delete_repository() RETURNS trigger AS
 $def$
@@ -290,6 +292,7 @@ CREATE TRIGGER es_delete_repository
   AFTER DELETE ON public.repository
   FOR EACH ROW EXECUTE PROCEDURE delete_repository();
 
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_delete_repository;
 
 CREATE OR REPLACE FUNCTION public.udpate_repository_lang() RETURNS trigger AS
 $def$
@@ -310,4 +313,6 @@ LANGUAGE plpgsql;
 DROP TRIGGER IF EXISTS es_udpate_repository_lang on public.language_stat;
 CREATE TRIGGER es_udpate_repository_lang
   AFTER INSERT OR UPDATE OR DELETE ON public.language_stat
-  FOR EACH ROW EXECUTE PROCEDURE udpate_repository_lang();
\ No newline at end of file
+  FOR EACH ROW EXECUTE PROCEDURE udpate_repository_lang();
+
+ALTER TABLE public.language_stat ENABLE ALWAYS TRIGGER es_udpate_repository_lang;
\ No newline at end of file
diff --git a/models/dbsql/user_foreigntable_for_es.sql b/models/dbsql/user_foreigntable_for_es.sql
index cc9e56db6..a1a42ee12 100644
--- a/models/dbsql/user_foreigntable_for_es.sql
+++ b/models/dbsql/user_foreigntable_for_es.sql
@@ -251,6 +251,8 @@ CREATE TRIGGER es_insert_user
   AFTER INSERT ON public.user
   FOR EACH ROW EXECUTE PROCEDURE insert_user_data();
 
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_insert_user;
+
 
 CREATE OR REPLACE FUNCTION public.update_user() RETURNS trigger AS
 $def$
@@ -274,7 +276,7 @@ CREATE TRIGGER es_update_user
   AFTER UPDATE ON public.user
   FOR EACH ROW EXECUTE PROCEDURE update_user();
 
-
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_update_user;
 
 CREATE OR REPLACE FUNCTION public.delete_user() RETURNS trigger AS
 $def$
@@ -289,4 +291,6 @@ LANGUAGE plpgsql;
 DROP TRIGGER IF EXISTS es_delete_user on public.user;
 CREATE TRIGGER es_delete_user
   AFTER DELETE ON public.user
-  FOR EACH ROW EXECUTE PROCEDURE delete_user();
\ No newline at end of file
+  FOR EACH ROW EXECUTE PROCEDURE delete_user();
+
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_delete_user;
\ No newline at end of file
diff --git a/routers/search.go b/routers/search.go
index dce250748..2dbc0f2b2 100644
--- a/routers/search.go
+++ b/routers/search.go
@@ -175,10 +175,10 @@ func makeRepoResult(sRes *elastic.SearchResult, Key string) *SearchRes {
 				record["lang"] = recordSource["lang"]
 				result = append(result, record)
 			} else {
-				log.Info("deal source error," + err.Error())
+				log.Info("deal repo source error," + err.Error())
 			}
 		} else {
-			log.Info("deal source error," + err.Error())
+			log.Info("deal repo source error," + err.Error())
 		}
 	}
 
@@ -299,10 +299,10 @@ func makeUserOrOrgResult(sRes *elastic.SearchResult, Key string, ctx *context.Co
 				record["created_unix"] = recordSource["created_unix"]
 				result = append(result, record)
 			} else {
-				log.Info("deal source error," + err.Error())
+				log.Info("deal user source error," + err.Error())
 			}
 		} else {
-			log.Info("deal source error," + err.Error())
+			log.Info("deal user source error," + err.Error())
 		}
 	}
 
@@ -326,6 +326,82 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
 	download_times
 	*/
 
+	SortBy := ctx.Query("SortBy")
+	if SortBy == "" {
+		SortBy = "download_times.keyword"
+	}
+	ascending := ctx.QueryBool("Ascending")
+	log.Info("query searchDataset start")
+	boolQ := elastic.NewBoolQuery()
+	if Key != "" {
+		nameQuery := elastic.NewMatchQuery("title", Key).Boost(2).QueryName("f_first")
+		descQuery := elastic.NewMatchQuery("description", Key).Boost(1.5).QueryName("f_second")
+		fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
+		categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
+		boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)
+		res, err := client.Search(TableName).Query(boolQ).Sort(SortBy, ascending).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
+		if err == nil {
+			result := makeDatasetResult(res, Key)
+			ctx.JSON(200, result)
+		} else {
+			log.Info("query es error," + err.Error())
+		}
+	} else {
+		log.Info("query all content.")
+		//the sort attribute must specify {"timestamp":{"unmapped_type":"date"}}
+		res, err := client.Search(TableName).Sort(SortBy, ascending).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
+		if err == nil {
+			result := makeDatasetResult(res, "")
+			ctx.JSON(200, result)
+		} else {
+			log.Info("query es error," + err.Error())
+			ctx.JSON(200, "")
+		}
+	}
+
+}
+
+func makeDatasetResult(sRes *elastic.SearchResult, Key string) *SearchRes {
+	total := sRes.Hits.TotalHits.Value
+	result := make([]map[string]interface{}, 0)
+
+	for i, hit := range sRes.Hits.Hits {
+		log.Info("this is dataset query " + fmt.Sprint(i) + " result.")
+		recordSource := make(map[string]interface{})
+		source, err := hit.Source.MarshalJSON()
+
+		if err == nil {
+			err = json.Unmarshal(source, &recordSource)
+			if err == nil {
+				record := make(map[string]interface{})
+				record["id"] = recordSource["id"]
+				userId := recordSource["user_id"].(int64)
+				user, errUser := models.GetUserByID(userId)
+				if errUser == nil {
+					record["owerName"] = user.GetDisplayName()
+					record["avatar"] = user.RelAvatarLink()
+				}
+				record["title"] = recordSource["title"]
+				record["category"] = recordSource["category"]
+				desc := recordSource["description"].(string)
+				record["description"] = dealLongText(desc, Key, hit.MatchedQueries)
+				record["download_times"] = recordSource["download_times"]
+				record["created_unix"] = recordSource["created_unix"]
+				result = append(result, record)
+			} else {
+				log.Info("deal dataset source error," + err.Error())
+			}
+		} else {
+			log.Info("deal dataset source error," + err.Error())
+		}
+	}
+
+	returnObj := &SearchRes{
+		Total:  total,
+		Result: result,
+	}
+
+	return returnObj
 }
 
 func searchIssue(ctx *context.Context, TableName string, Key string, Page int, PageSize int) {
@@ -339,7 +415,11 @@ func searchIssue(ctx *context.Context, TableName string, Key string, Page int, P
 	排序:
 	updated_unix
 	*/
-
+	SortBy := ctx.Query("SortBy")
+	if SortBy == "" {
+		SortBy = "updated_unix.keyword"
+	}
+	ascending := ctx.QueryBool("Ascending")
 	boolQ := elastic.NewBoolQuery()
 	if Key != "" {
 		nameQuery := elastic.NewMatchQuery("name", Key).Boost(2).QueryName("f_first")
@@ -349,7 +429,7 @@ func searchIssue(ctx *context.Context, TableName string, Key string, Page int, P
 	}
 	isIssueQuery := elastic.NewTermQuery("is_pull", false)
 	boolQ.Must(isIssueQuery)
-	res, err := client.Search(TableName).Query(boolQ).Sort("updated_unix.keyword", false).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).Sort(SortBy, ascending).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
 	if err == nil {
 		result := makeIssueResult(res, Key)
 		ctx.JSON(200, result)
@@ -377,6 +457,7 @@ func makeIssueResult(sRes *elastic.SearchResult, Key string) *SearchRes {
 				repo, errRepo := models.GetRepositoryByID(recordSource["repo_id"].(int64))
 				if errRepo == nil {
 					record["repoUrl"] = repo.FullName()
+					record["avatar"] = repo.RelAvatarLink()
 				}
 				record["name"] = recordSource["name"]
 				desc := recordSource["content"].(string)
@@ -391,10 +472,10 @@ func makeIssueResult(sRes *elastic.SearchResult, Key string) *SearchRes {
 				record["updated_unix"] = recordSource["updated_unix"]
 				result = append(result, record)
 			} else {
-				log.Info("deal source error," + err.Error())
+				log.Info("deal issue source error," + err.Error())
 			}
 		} else {
-			log.Info("deal source error," + err.Error())
+			log.Info("deal issue source error," + err.Error())
 		}
 	}
 
@@ -417,7 +498,11 @@ func searchPR(ctx *context.Context, TableName string, Key string, Page int, Page
 	排序:
 	updated_unix
 	*/
-
+	SortBy := ctx.Query("SortBy")
+	if SortBy == "" {
+		SortBy = "updated_unix.keyword"
+	}
+	ascending := ctx.QueryBool("Ascending")
 	boolQ := elastic.NewBoolQuery()
 	if Key != "" {
 		nameQuery := elastic.NewMatchQuery("name", Key).Boost(2).QueryName("f_first")
@@ -427,7 +512,7 @@ func searchPR(ctx *context.Context, TableName string, Key string, Page int, Page
 	}
 	isIssueQuery := elastic.NewTermQuery("is_pull", true)
 	boolQ.Must(isIssueQuery)
-	res, err := client.Search(TableName).Query(boolQ).Sort("updated_unix.keyword", false).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).Sort(SortBy, ascending).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context())
 	if err == nil {
 		result := makeIssueResult(res, Key)
 		ctx.JSON(200, result)