@@ -6,6 +6,8 @@ import (
	"sort"
	"strings"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/timeutil"
@@ -88,7 +90,7 @@ func (datasets DatasetList) loadAttributes(e Engine) error {
	if err := e.
		Where("id > 0").
		In("id", keysInt64(set)).
		Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias","is_private").
		Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias", "is_private").
		Find(&repos); err != nil {
		return fmt.Errorf("find repos: %v", err)
	}
@@ -126,7 +128,7 @@ func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions)
				permission = false
				datasets[i].Repo.GetOwner()
				if datasets[i].Repo.Owner.IsOrganization() {
				if datasets[i].Repo.Owner.IsOrganization() {
					if datasets[i].Repo.Owner.IsUserPartOfOrg(opts.User.ID) {
						log.Info("user is member of org.")
						permission = true
@@ -140,10 +142,10 @@ func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions)
					}
				}
				permissionMap[datasets[i].ID]=permission
				permissionMap[datasets[i].ID] = permission
			}
			if permission{
			if permission {
				datasets[i].Attachments = append(datasets[i].Attachments, attachment)
			} else if !attachment.IsPrivate {
				datasets[i].Attachments = append(datasets[i].Attachments, attachment)
@@ -329,13 +331,15 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
		return nil, 0, fmt.Errorf("Count: %v", err)
	}
	sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
	builderQuery := builder.Dialect(setting.Database.Type).Select("id", "title", "status", "category", "description", "download_times", "license", "task", "release_id", "user_id", "repo_id", "created_unix", "updated_unix", "num_stars", "recommend", "use_count").From(builder.Dialect(setting.Database.Type).Select(selectColumnsSql).From("dataset").Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
		Where(cond).OrderBy(opts.SearchOrderBy.String())
		Where(cond), "d").OrderBy(opts.SearchOrderBy.String())
	if opts.PageSize > 0 {
		sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
		builderQuery.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
	}
	if err = sess.Find(&datasets); err != nil {
	if err = sess.SQL(builderQuery).Find(&datasets); err != nil {
		return nil, 0, fmt.Errorf("Dataset: %v", err)
	}
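
Note on the hunk above: the hand-built session query is replaced by a builder expression that wraps the join in a sub-select (aliased `d`) and is then handed to `sess.SQL(builderQuery)`. When reviewing changes like this, it can help to print the SQL the builder actually emits. A minimal standalone sketch, not part of the patch: table and column names are illustrative, and it assumes xorm.io/builder with sub-query support in `From`, which the patched code itself relies on.

```go
package main

import (
	"fmt"

	"xorm.io/builder"
)

func main() {
	// Inner query: the dataset/repository/attachment join, as in the patch.
	inner := builder.Dialect(builder.POSTGRES).
		Select("dataset.id", "dataset.title", "dataset.repo_id", "dataset.updated_unix").
		From("dataset").
		Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id = dataset.id").
		Where(builder.Eq{"repository.is_private": false})

	// Outer query: select from the aliased sub-select, then order and page.
	outer := builder.Dialect(builder.POSTGRES).
		Select("id", "title", "repo_id").
		From(inner, "d").
		OrderBy("updated_unix DESC").
		Limit(10, 0)

	sql, args, err := outer.ToSQL()
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
}
```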
@@ -1,42 +1,87 @@
package models
import "code.gitea.io/gitea/modules/timeutil"
import (
	"strconv"
	"strings"
	"code.gitea.io/gitea/modules/timeutil"
)
type DatasetReference struct {
	ID int64 `xorm:"pk autoincr"`
	RepoID int64 `xorm:"INDEX unique(repo_dataset)"`
	DatasetID int64 `xorm:"INDEX unique(repo_dataset)"`
	RepoID int64 `xorm:"INDEX unique"`
	DatasetID string `xorm:"TEXT"`
	CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
}
func GetDatasetIdsByRepoID(repoID int64) []int64 {
	var datasets []int64
	var datasetIds []string
	_ = x.Table("dataset_reference").Where("repo_id=?", repoID).
		Cols("dataset_reference.dataset_id").Find(&datasets)
		Cols("dataset_reference.dataset_id").Find(&datasetIds)
	if len(datasetIds) > 0 {
		for _, datasetIdStr := range strings.Split(datasetIds[0], ",") {
			datasetId, err := strconv.ParseInt(datasetIdStr, 10, 64)
			if err != nil {
				continue
			}
			datasets = append(datasets, datasetId)
		}
	}
	return datasets
}
func DeleteDatasetIdsByRepoID(repoID int64, datasetIds []int64) error {
	if len(datasetIds) == 0 {
		return nil
func HasReferenceDataset(repoID int64) bool {
	var datasetIds []string
	_ = x.Table("dataset_reference").Where("repo_id=?", repoID).
		Cols("dataset_reference.dataset_id").Find(&datasetIds)
	return len(datasetIds) > 0
}
func getReferenceDatasetStr(repoID int64) string {
	var datasetIds []string
	_ = x.Table("dataset_reference").Where("repo_id=?", repoID).
		Cols("dataset_reference.dataset_id").Find(&datasetIds)
	if len(datasetIds) > 0 {
		return datasetIds[0]
	}
	_, err := x.In("dataset_id", datasetIds).And("repo_id", repoID).Delete(new(DatasetReference))
	return ""
}
func deleteDatasetIdsByRepoID(repoID int64) error {
	_, err := x.Where("repo_id", repoID).Delete(new(DatasetReference))
	return err
}
func NewDatasetIdsByRepoID(repoID int64, datasetIds []int64) error {
	var datasetReference []DatasetReference
	if len(datasetIds) == 0 { // the referenced dataset id list is empty, nothing to do
		return nil
	}
	var datasetsStrArray []string
	for _, datasetId := range datasetIds {
		datasetReference = append(datasetReference, DatasetReference{
			DatasetID: datasetId,
			RepoID: repoID,
		})
		datasetsStrArray = append(datasetsStrArray, strconv.FormatInt(datasetId, 10))
	}
	_, err := x.Insert(datasetReference)
	return err
}
func DeleteReferenceDatasetByDatasetID(datasetId int64) error {
	_, err := x.Delete(&DatasetReference{DatasetID: datasetId})
	newDatasetStr := strings.Join(datasetsStrArray, ",")
	oldDatasetStr := getReferenceDatasetStr(repoID)
	if newDatasetStr == oldDatasetStr { // the referenced dataset list is unchanged, nothing to do
		return nil
	}
	if oldDatasetStr != "" { // a referenced dataset record already exists for this repo
		err := deleteDatasetIdsByRepoID(repoID)
		if err != nil {
			return err
		}
	}
	datasetReference := DatasetReference{
		DatasetID: newDatasetStr,
		RepoID: repoID,
	}
	_, err := x.Insert(datasetReference)
	return err
}
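
For readers of the reworked dataset_reference model above: instead of one row per (repo, dataset) pair, a repo now gets a single row whose DatasetID TEXT column holds a comma-separated id list, and NewDatasetIdsByRepoID/GetDatasetIdsByRepoID are the encode/decode ends of that format. A standalone sketch of the round trip, not part of the patch (no database involved, ids are illustrative):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// encodeDatasetIds mirrors the joining done in NewDatasetIdsByRepoID.
func encodeDatasetIds(ids []int64) string {
	parts := make([]string, 0, len(ids))
	for _, id := range ids {
		parts = append(parts, strconv.FormatInt(id, 10))
	}
	return strings.Join(parts, ",")
}

// decodeDatasetIds mirrors the splitting done in GetDatasetIdsByRepoID.
func decodeDatasetIds(s string) []int64 {
	var ids []int64
	for _, part := range strings.Split(s, ",") {
		id, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			continue // malformed entries are skipped, as in the model code
		}
		ids = append(ids, id)
	}
	return ids
}

func main() {
	stored := encodeDatasetIds([]int64{3, 5, 8}) // one TEXT value per repo: "3,5,8"
	fmt.Println(stored)                          // 3,5,8
	fmt.Println(decodeDatasetIds(stored))        // [3 5 8]
}
```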
@@ -145,7 +145,7 @@ func init() {
		new(OrgStatistic),
		new(SearchRecord),
		new(AiModelConvert),
		new(CloudbrainTemp),
		new(DatasetReference),
	)
	tablesStatistic = append(tablesStatistic,
@@ -27,7 +27,7 @@ func IsShowDataSetOfCurrentRepo(repoID int64) bool {
	if dataset != nil {
		return true
	}
	if len(models.GetDatasetIdsByRepoID(repoID)) > 0 {
	if models.HasReferenceDataset(repoID) {
		return false
	}
	return true
@@ -274,7 +274,16 @@ func ReferenceDatasetDelete(ctx *context.Context) {
	repoID := ctx.Repo.Repository.ID
	datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	err := models.DeleteDatasetIdsByRepoID(repoID, []int64{datasetId})
	oldDatasetIds := models.GetDatasetIdsByRepoID(repoID)
	var newDatasetIds []int64
	for _, tempDatasetId := range oldDatasetIds {
		if datasetId != tempDatasetId {
			newDatasetIds = append(newDatasetIds, tempDatasetId)
		}
	}
	err := models.NewDatasetIdsByRepoID(repoID, newDatasetIds)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage("dataset.cancel_reference_dataset_fail"))
	}
@@ -284,29 +293,7 @@ func ReferenceDatasetDelete(ctx *context.Context) {
func ReferenceDatasetPost(ctx *context.Context, form auth.ReferenceDatasetForm) {
	repoID := ctx.Repo.Repository.ID
	datasetsID := models.GetDatasetIdsByRepoID(repoID)
	var newDataset []int64
	var deletedDataset []int64
	for _, id := range datasetsID {
		if !contains(form.DatasetID, id) {
			deletedDataset = append(deletedDataset, id)
		}
	}
	for _, id := range form.DatasetID {
		if !contains(datasetsID, id) {
			newDataset = append(newDataset, id)
		}
	}
	err := models.DeleteDatasetIdsByRepoID(repoID, deletedDataset)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.reference_dataset_fail")))
	}
	err = models.NewDatasetIdsByRepoID(repoID, newDataset)
	err := models.NewDatasetIdsByRepoID(repoID, form.DatasetID)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage("dataset.reference_dataset_fail"))
	}
@@ -315,15 +302,6 @@ func ReferenceDatasetPost(ctx *context.Context, form auth.ReferenceDatasetForm)
}
func contains(s []int64, e int64) bool {
	for _, a := range s {
		if a == e {
			return true
		}
	}
	return false
}
func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
	ctx.Data["PageIsDataset"] = true
@@ -467,13 +445,23 @@ func MyDatasets(ctx *context.Context) {
}
func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
	datasets, count, err := datasetMultipleGet(ctx, opts)
	page := ctx.QueryInt("page")
	keyword := strings.Trim(ctx.Query("q"), " ")
	opts.Keyword = keyword
	if opts.SearchOrderBy.String() == "" {
		opts.SearchOrderBy = models.SearchOrderByRecentUpdated
	}
	datasetMultipleResult(ctx, err, datasets, count)
	opts.RecommendOnly = ctx.QueryBool("recommend")
	opts.ListOptions = models.ListOptions{
		Page: page,
		PageSize: setting.UI.DatasetPagingNum,
	}
	opts.JustNeedZipFile = true
	opts.User = ctx.User
}
	datasets, count, err := models.SearchDataset(opts)
func datasetMultipleResult(ctx *context.Context, err error, datasets models.DatasetList, count int64) {
	if err != nil {
		ctx.ServerError("datasets", err)
		return
@@ -496,52 +484,31 @@ func datasetMultipleResult(ctx *context.Context, err error, datasets models.Data
	})
}
func datasetMultipleGet(ctx *context.Context, opts *models.SearchDatasetOptions) (models.DatasetList, int64, error) {
	page := ctx.QueryInt("page")
	keyword := strings.Trim(ctx.Query("q"), " ")
	orderBy := models.SearchOrderByRecentUpdated
	opts.Keyword = keyword
	opts.SearchOrderBy = orderBy
	opts.RecommendOnly = ctx.QueryBool("recommend")
	opts.ListOptions = models.ListOptions{
		Page: page,
		PageSize: setting.UI.DatasetPagingNum,
	}
	opts.JustNeedZipFile = true
	opts.User = ctx.User
	datasets, count, err := models.SearchDataset(opts)
	return datasets, count, err
}
func CurrentRepoDatasetMultiple(ctx *context.Context) {
	datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID)
	searchOrderBy := getSearchOrderByInValues(datasetIds)
	opts := &models.SearchDatasetOptions{
		RepoID: ctx.Repo.Repository.ID,
		NeedAttachment: true,
		CloudBrainType: ctx.QueryInt("type"),
		DatasetIDs: models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID),
		DatasetIDs: datasetIds,
		SearchOrderBy: searchOrderBy,
	}
	datasetList, count, err := datasetMultipleGet(ctx, opts)
	if len(datasetList) > 0 {
		var convertDatasetList models.DatasetList
	datasetMultiple(ctx, opts)
		for _, dataset := range datasetList {
			if dataset.RepoID == ctx.Repo.Repository.ID && len(convertDatasetList) == 0 {
				convertDatasetList = append(convertDatasetList, dataset)
			}
		}
		for _, dataset := range datasetList {
			if dataset.RepoID != ctx.Repo.Repository.ID {
				convertDatasetList = append(convertDatasetList, dataset)
			}
		}
		datasetMultipleResult(ctx, err, convertDatasetList, count)
	} else {
		datasetMultipleResult(ctx, err, datasetList, count)
	}
}
func getSearchOrderByInValues(datasetIds []int64) models.SearchOrderBy {
	if len(datasetIds) == 0 {
		return ""
	}
	searchOrderBy := "CASE id "
	for i, id := range datasetIds {
		searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
	}
	searchOrderBy += " ELSE 0 END"
	return models.SearchOrderBy(searchOrderBy)
}
func MyDatasetsMultiple(ctx *context.Context) {
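
The getSearchOrderByInValues helper added in the hunk above turns the stored reference order into a SQL CASE expression, so SearchDataset returns the datasets in exactly that order. A standalone sketch of the string it builds, not part of the patch (same construction, illustrative ids):

```go
package main

import "fmt"

// searchOrderByInValues reproduces the CASE-expression construction from
// getSearchOrderByInValues for illustration.
func searchOrderByInValues(datasetIds []int64) string {
	if len(datasetIds) == 0 {
		return ""
	}
	orderBy := "CASE id "
	for i, id := range datasetIds {
		orderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
	}
	return orderBy + " ELSE 0 END"
}

func main() {
	// Prints: CASE id  WHEN 8 THEN 1 WHEN 3 THEN 2 WHEN 5 THEN 3 ELSE 0 END
	fmt.Println(searchOrderByInValues([]int64{8, 3, 5}))
}
```

Since ORDER BY is ascending by default, any row not in the list (the ELSE 0 branch) would sort ahead of the referenced ones.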
@@ -589,17 +556,18 @@ func MyFavoriteDatasetMultiple(ctx *context.Context) {
}
func ReferenceDataset(ctx *context.Context) {
	MustEnableDataset(ctx)
	datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID)
	opts := &models.SearchDatasetOptions{
		DatasetIDs: models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID),
		DatasetIDs: datasetIds,
		NeedAttachment: false,
		CloudBrainType: models.TypeCloudBrainAll,
		ListOptions: models.ListOptions{
			Page: 1,
			PageSize: setting.RepoMaxReferenceDatasetNum,
		},
		SearchOrderBy: models.SearchOrderByRecentUpdated,
		SearchOrderBy: getSearchOrderByInValues(datasetIds),
	}
	datasets, count, err := models.SearchDataset(opts)
	datasets, _, err := models.SearchDataset(opts)
	if err != nil {
		ctx.ServerError("SearchDatasets", err)
@@ -608,7 +576,7 @@ func ReferenceDataset(ctx *context.Context) {
	ctx.Data["Datasets"] = repository.ConvertToDatasetWithStar(ctx, datasets)
	ctx.Data["PageIsDataset"] = true
	ctx.Data["Total"] = count
	ctx.Data["MaxReferenceDatasetNum"] = setting.RepoMaxReferenceDatasetNum
	ctx.HTML(200, tplReference)
}