From e194cfe9c4d5d298b37e34a5f09a55af70592e3f Mon Sep 17 00:00:00 2001 From: zouap Date: Mon, 28 Mar 2022 16:01:18 +0800 Subject: [PATCH] =?UTF-8?q?=E5=A2=9E=E5=8A=A0=E6=95=B0=E6=8D=AE=E9=9B=86?= =?UTF-8?q?=E7=9A=84=E7=A7=81=E6=9C=89=E6=90=9C=E7=B4=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: zouap --- models/dbsql/dataset_foreigntable_for_es.sql | 14 ++- models/dbsql/repo_foreigntable_for_es.sql | 10 +- models/dbsql/user_foreigntable_for_es.sql | 30 ++++-- models/search_record.go | 32 +++++- routers/search.go | 103 +++++++++++++++++-- 5 files changed, 162 insertions(+), 27 deletions(-) diff --git a/models/dbsql/dataset_foreigntable_for_es.sql b/models/dbsql/dataset_foreigntable_for_es.sql index ecc12b8ac..fec4b91ff 100644 --- a/models/dbsql/dataset_foreigntable_for_es.sql +++ b/models/dbsql/dataset_foreigntable_for_es.sql @@ -55,6 +55,9 @@ DELETE FROM public.dataset_es; b.updated_unix,(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment a where a.dataset_id=b.id and a.is_private=false) FROM public.dataset b,public.repository c where b.repo_id=c.id and c.is_private=false; + +DROP TRIGGER IF EXISTS es_insert_dataset on public.dataset; + CREATE OR REPLACE FUNCTION public.insert_dataset_data() RETURNS trigger AS $def$ DECLARE @@ -97,7 +100,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_insert_dataset on public.dataset; + CREATE TRIGGER es_insert_dataset AFTER INSERT ON public.dataset @@ -105,6 +108,9 @@ CREATE TRIGGER es_insert_dataset ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_insert_dataset; + +DROP TRIGGER IF EXISTS es_udpate_dataset_file_name_delete on public.attachment; + CREATE OR REPLACE FUNCTION public.udpate_dataset_file_name_delete() RETURNS trigger AS $def$ BEGIN @@ -114,7 +120,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_udpate_dataset_file_name_delete on public.attachment; + CREATE TRIGGER 
es_udpate_dataset_file_name_delete AFTER DELETE ON public.attachment FOR EACH ROW EXECUTE PROCEDURE udpate_dataset_file_name_delete(); @@ -145,6 +151,8 @@ CREATE TRIGGER es_update_dataset ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_update_dataset; +DROP TRIGGER IF EXISTS es_delete_dataset on public.dataset; + CREATE OR REPLACE FUNCTION public.delete_dataset() RETURNS trigger AS $def$ declare @@ -155,7 +163,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_delete_dataset on public.dataset; + CREATE TRIGGER es_delete_dataset AFTER DELETE ON public.dataset FOR EACH ROW EXECUTE PROCEDURE delete_dataset(); diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql index 430015926..f51155ccf 100644 --- a/models/dbsql/repo_foreigntable_for_es.sql +++ b/models/dbsql/repo_foreigntable_for_es.sql @@ -156,6 +156,7 @@ delete from public.repository_es; lower_alias FROM public.repository b where b.is_private=false; +DROP TRIGGER IF EXISTS es_insert_repository on public.repository; CREATE OR REPLACE FUNCTION public.insert_repository_data() RETURNS trigger AS $def$ @@ -257,8 +258,6 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_insert_repository on public.repository; - CREATE TRIGGER es_insert_repository AFTER INSERT ON public.repository @@ -483,6 +482,9 @@ CREATE TRIGGER es_update_repository ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_update_repository; + +DROP TRIGGER IF EXISTS es_delete_repository on public.repository; + CREATE OR REPLACE FUNCTION public.delete_repository() RETURNS trigger AS $def$ declare @@ -495,7 +497,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_delete_repository on public.repository; + CREATE TRIGGER es_delete_repository AFTER DELETE ON public.repository FOR EACH ROW EXECUTE PROCEDURE delete_repository(); @@ -514,7 +516,7 @@ $def$ elsif (TG_OP = 'INSERT') then update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage 
desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id; elsif (TG_OP = 'DELETE') then - if exists(select 1 from public.repository where id=OLD.repo_id) + if exists(select 1 from public.repository where id=OLD.repo_id) then update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=OLD.repo_id) where id=OLD.repo_id; end if; end if; diff --git a/models/dbsql/user_foreigntable_for_es.sql b/models/dbsql/user_foreigntable_for_es.sql index ded554c0f..c3d21b92a 100644 --- a/models/dbsql/user_foreigntable_for_es.sql +++ b/models/dbsql/user_foreigntable_for_es.sql @@ -49,7 +49,8 @@ CREATE FOREIGN TABLE public.user_es token character varying(1024) , public_key character varying(255), private_key character varying(255), - is_operator boolean NOT NULL DEFAULT false + is_operator boolean NOT NULL DEFAULT false, + num_dataset_stars integer NOT NULL DEFAULT 0 ) SERVER multicorn_es OPTIONS ( @@ -103,7 +104,8 @@ delete from public.user_es; repo_admin_change_team_access, diff_view_style, theme, - is_operator) + is_operator, + num_dataset_stars) SELECT id, lower_name, @@ -146,9 +148,12 @@ delete from public.user_es; repo_admin_change_team_access, diff_view_style, theme, - is_operator + is_operator, + num_dataset_stars FROM public.user; +DROP TRIGGER IF EXISTS es_insert_user on public.user; + CREATE OR REPLACE FUNCTION public.insert_user_data() RETURNS trigger AS $def$ BEGIN @@ -194,7 +199,8 @@ $def$ repo_admin_change_team_access, diff_view_style, theme, - is_operator) + is_operator, + num_dataset_stars) VALUES ( NEW.id, NEW.lower_name, @@ -237,7 +243,8 @@ $def$ NEW.repo_admin_change_team_access, NEW.diff_view_style, NEW.theme, - NEW.is_operator + NEW.is_operator, + NEW.num_dataset_stars ); RETURN NEW; @@ -245,7 +252,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_insert_user on public.user; + CREATE TRIGGER es_insert_user AFTER INSERT ON public.user 
@@ -253,6 +260,7 @@ CREATE TRIGGER es_insert_user ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_insert_user; +DROP TRIGGER IF EXISTS es_update_user on public.user; CREATE OR REPLACE FUNCTION public.update_user() RETURNS trigger AS $def$ @@ -263,14 +271,16 @@ $def$ full_name=NEW.full_name, location=NEW.location, website=NEW.website, - email=NEW.email + email=NEW.email, + num_dataset_stars=NEW.num_dataset_stars, + updated_unix=NEW.updated_unix where id=NEW.id; return new; END $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_update_user on public.user; + CREATE TRIGGER es_update_user AFTER UPDATE ON public.user @@ -278,6 +288,8 @@ CREATE TRIGGER es_update_user ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_update_user; +DROP TRIGGER IF EXISTS es_delete_user on public.user; + CREATE OR REPLACE FUNCTION public.delete_user() RETURNS trigger AS $def$ declare @@ -288,7 +300,7 @@ $def$ $def$ LANGUAGE plpgsql; -DROP TRIGGER IF EXISTS es_delete_user on public.user; + CREATE TRIGGER es_delete_user AFTER DELETE ON public.user FOR EACH ROW EXECUTE PROCEDURE delete_user(); diff --git a/models/search_record.go b/models/search_record.go index 91b951e0e..706abcbaa 100644 --- a/models/search_record.go +++ b/models/search_record.go @@ -28,7 +28,7 @@ func SaveSearchKeywordToDb(keyword string) error { return nil } -func setQueryCondition(sess *xorm.Session, Keyword string, isPull bool, userId int64) { +func setIssueQueryCondition(sess *xorm.Session, Keyword string, isPull bool, userId int64) { sess.And("issue.poster_id=?", userId) sess.And("issue.is_pull=?", isPull) sess.And("(issue.name like '%" + Keyword + "%' or issue.content like '%" + Keyword + "%')") @@ -38,13 +38,13 @@ func setQueryCondition(sess *xorm.Session, Keyword string, isPull bool, userId i func SearchPrivateIssueOrPr(Page int, PageSize int, Keyword string, isPull bool, userId int64) ([]*Issue, int64, error) { sess := x.NewSession() defer sess.Close() - setQueryCondition(sess, Keyword, isPull, userId) + 
setIssueQueryCondition(sess, Keyword, isPull, userId) count, err := sess.Count(new(Issue)) if err != nil { return nil, 0, err } - setQueryCondition(sess, Keyword, isPull, userId) + setIssueQueryCondition(sess, Keyword, isPull, userId) sess.Desc("issue.created_unix") sess.Limit(PageSize, (Page-1)*PageSize) issues := make([]*Issue, 0) @@ -53,5 +53,31 @@ func SearchPrivateIssueOrPr(Page int, PageSize int, Keyword string, isPull bool, } else { return issues, count, nil } +} + +func setDataSetQueryCondition(sess *xorm.Session, Keyword string, userId int64) { + sess.And("dataset.user_id=?", userId) + sess.And("(dataset.title like '%" + Keyword + "%' or dataset.description like '%" + Keyword + "%')") + sess.Join("INNER", "repository", "dataset.repo_id = repository.id").And("repository.is_private = ?", true) +} + +func SearchDatasetBySQL(Page int, PageSize int, Keyword string, userId int64) ([]*Dataset, int64, error) { + sess := x.NewSession() + defer sess.Close() + setDataSetQueryCondition(sess, Keyword, userId) + count, err := sess.Count(new(Dataset)) + if err != nil { + return nil, 0, err + } + + setDataSetQueryCondition(sess, Keyword, userId) + sess.Desc("dataset.created_unix") + sess.Limit(PageSize, (Page-1)*PageSize) + datasets := make([]*Dataset, 0) + if err := sess.Find(&datasets); err != nil { + return nil, 0, err + } else { + return datasets, count, nil + } } diff --git a/routers/search.go b/routers/search.go index 912d7707a..fb7b7dfa0 100644 --- a/routers/search.go +++ b/routers/search.go @@ -770,9 +770,54 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, download_times */ + log.Info("query searchdataset start") SortBy := ctx.Query("SortBy") ascending := ctx.QueryBool("Ascending") - log.Info("query searchRepo start") + PrivateTotal := ctx.QueryInt("PrivateTotal") + WebTotal := ctx.QueryInt("WebTotal") + + from := (Page - 1) * PageSize + if from == 0 { + WebTotal = 0 + } + resultObj := &SearchRes{} + log.Info("WebTotal=" + 
fmt.Sprint(WebTotal)) + log.Info("PrivateTotal=" + fmt.Sprint(PrivateTotal)) + resultObj.Result = make([]map[string]interface{}, 0) + + if ctx.User != nil && (from < PrivateTotal || from == 0) { + + log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil)) + datasets, count, err := models.SearchDatasetBySQL(Page, PageSize, Key, ctx.User.ID) + if err != nil { + ctx.JSON(200, "") + return + } + resultObj.PrivateTotal = count + datasetSize := len(datasets) + if datasetSize > 0 { + log.Info("Query private dataset number is:" + fmt.Sprint(datasetSize) + " count=" + fmt.Sprint(count)) + makePrivateDataSet(datasets, resultObj, Key) + } else { + log.Info("not found private dataset, keyword=" + Key) + } + if datasetSize >= PageSize { + if WebTotal > 0 { //next page, not first query. + resultObj.Total = int64(WebTotal) + ctx.JSON(200, resultObj) + return + } + } + } else { + resultObj.PrivateTotal = int64(PrivateTotal) + } + + from = from - PrivateTotal + if from < 0 { + from = 0 + } + Size := PageSize - len(resultObj.Result) + boolQ := elastic.NewBoolQuery() if Key != "" { nameQuery := elastic.NewMatchQuery("title", Key).Boost(2).QueryName("f_first") @@ -780,24 +825,30 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) - res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) + res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) 
log.Info("searchJson=" + string(searchJson)) - result := makeDatasetResult(res, Key, OnlyReturnNum) - ctx.JSON(200, result) + esresult := makeDatasetResult(res, Key, OnlyReturnNum) + resultObj.Total = resultObj.PrivateTotal + esresult.Total + log.Info("query dataset es count=" + fmt.Sprint(esresult.Total) + " total=" + fmt.Sprint(resultObj.Total)) + resultObj.Result = append(resultObj.Result, esresult.Result...) + ctx.JSON(200, resultObj) } else { log.Info("query es error," + err.Error()) } } else { - log.Info("query all content.") + log.Info("query all datasets.") //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} - res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Do(ctx.Req.Context()) + res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) - result := makeDatasetResult(res, "", OnlyReturnNum) - ctx.JSON(200, result) + esresult := makeDatasetResult(res, "", OnlyReturnNum) + resultObj.Total = resultObj.PrivateTotal + esresult.Total + log.Info("query dataset es count=" + fmt.Sprint(esresult.Total) + " total=" + fmt.Sprint(resultObj.Total)) + resultObj.Result = append(resultObj.Result, esresult.Result...) 
+ ctx.JSON(200, resultObj) } else { log.Info("query es error," + err.Error()) ctx.JSON(200, "") @@ -806,6 +857,42 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, } +func makePrivateDataSet(datasets []*models.Dataset, res *SearchRes, Key string) { + for _, dataset := range datasets { + record := make(map[string]interface{}) + + record["id"] = dataset.ID + userId := dataset.UserID + + user, errUser := models.GetUserByID(userId) + if errUser == nil { + record["owerName"] = user.GetDisplayName() + record["avatar"] = user.RelAvatarLink() + } + + repo, errRepo := models.GetRepositoryByID(dataset.RepoID) + if errRepo == nil { + log.Info("repo_url=" + repo.FullName()) + record["repoUrl"] = repo.FullName() + record["avatar"] = repo.RelAvatarLink() + } else { + log.Info("repo err=" + errRepo.Error()) + } + + record["title"] = makeHighLight(Key, dataset.Title) + record["description"] = truncLongText(makeHighLight(Key, dataset.Description), true) + + record["category"] = dataset.Category + record["task"] = dataset.Task + record["download_times"] = dataset.DownloadTimes + record["created_unix"] = dataset.CreatedUnix + record["updated_unix"] = dataset.UpdatedUnix + record["updated_html"] = timeutil.TimeSinceUnix(dataset.UpdatedUnix, "zh-CN") + + res.Result = append(res.Result, record) + } +} + func makeDatasetResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool) *SearchRes { total := sRes.Hits.TotalHits.Value result := make([]map[string]interface{}, 0)