@@ -13,13 +13,13 @@ const (
 type FileChunk struct {
 	ID             int64  `xorm:"pk autoincr"`
 	UUID           string `xorm:"uuid UNIQUE"`
-	Md5            string `xorm:"UNIQUE"`
+	Md5            string `xorm:"INDEX"`
 	IsUploaded     int    `xorm:"DEFAULT 0"` // not uploaded: 0, uploaded: 1
 	UploadID       string `xorm:"UNIQUE"` // minio upload id
 	TotalChunks    int
 	Size           int64
 	UserID         int64 `xorm:"INDEX"`
-	CompletedParts []string // chunkNumber+etag eg: ,1-asqwewqe21312312.2-123hjkas
+	CompletedParts []string `xorm:"DEFAULT """` // chunkNumber+etag eg: ,1-asqwewqe21312312.2-123hjkas
 	CreatedUnix    timeutil.TimeStamp `xorm:"INDEX created"`
 	UpdatedUnix    timeutil.TimeStamp `xorm:"INDEX updated"`
 }
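Two schema changes land in this hunk: `Md5` is demoted from UNIQUE to a plain INDEX, so different users can each keep a chunk record for identical content, and `CompletedParts` now persists each confirmed chunk as a `partNumber-etag` string. A minimal standalone sketch of that encoding, inferred from the field comment and the parsing loop later in this diff (the helper names are illustrative, not part of the patch):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// encodePart builds one CompletedParts entry, e.g. "2-684929e7fe8b996d495e7b152d34ae37".
func encodePart(partNumber int, etag string) string {
	return strconv.Itoa(partNumber) + "-" + etag
}

// decodePart reverses encodePart. SplitN(entry, "-", 2) keeps the etag intact
// even if it contains a '-'; the patch itself uses strings.Split, which
// assumes etags never do.
func decodePart(entry string) (int, string, error) {
	fields := strings.SplitN(entry, "-", 2)
	if len(fields) != 2 {
		return 0, "", fmt.Errorf("malformed part entry %q", entry)
	}
	partNumber, err := strconv.Atoi(fields[0])
	return partNumber, fields[1], err
}

func main() {
	entry := encodePart(2, "684929e7fe8b996d495e7b152d34ae37")
	partNumber, etag, err := decodePart(entry)
	fmt.Println(partNumber, etag, err) // 2 684929e7fe8b996d495e7b152d34ae37 <nil>
}
```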
@@ -40,6 +40,22 @@ func getFileChunkByMD5(e Engine, md5 string) (*FileChunk, error) {
 	return fileChunk, nil
 }
+
+// GetFileChunkByMD5AndUser returns fileChunk by given md5 and user id
+func GetFileChunkByMD5AndUser(md5 string, userID int64) (*FileChunk, error) {
+	return getFileChunkByMD5AndUser(x, md5, userID)
+}
+
+func getFileChunkByMD5AndUser(e Engine, md5 string, userID int64) (*FileChunk, error) {
+	fileChunk := new(FileChunk)
+
+	if has, err := e.Where("md5 = ? and user_id = ?", md5, userID).Get(fileChunk); err != nil {
+		return nil, err
+	} else if !has {
+		return nil, ErrFileChunkNotExist{md5, ""}
+	}
+	return fileChunk, nil
+}
 
 // GetFileChunkByUUID returns fileChunk by given uuid
 func GetFileChunkByUUID(uuid string) (*FileChunk, error) {
 	return getFileChunkByUUID(x, uuid)
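A hedged sketch of how the new per-user lookup is meant to be consumed, mirroring the `GetSuccessChunks` handler further down in this diff (the wrapper and its name are illustrative, not part of the patch):

```go
// resumableChunk reports whether the given user already has an upload in
// flight for this md5, distinguishing "no record yet" from a real error.
func resumableChunk(fileMD5 string, userID int64) (*FileChunk, bool, error) {
	fileChunk, err := GetFileChunkByMD5AndUser(fileMD5, userID)
	if err != nil {
		if IsErrFileChunkNotExist(err) {
			return nil, false, nil // nothing recorded yet: start a fresh upload
		}
		return nil, false, err // genuine database error
	}
	// Caller can resume from fileChunk.CompletedParts / fileChunk.IsUploaded.
	return fileChunk, true, nil
}
```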
@@ -1,10 +1,10 @@ | |||||
package storage | package storage | ||||
import ( | import ( | ||||
"encoding/json" | |||||
"encoding/xml" | "encoding/xml" | ||||
"path" | "path" | ||||
"sort" | "sort" | ||||
"strconv" | |||||
"strings" | "strings" | ||||
"sync" | "sync" | ||||
"time" | "time" | ||||
@@ -127,7 +127,7 @@ func NewMultiPartUpload(uuid string) (string, error) {
 	return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{})
 }
 
-func CompleteMultiPartUpload(uuid string, uploadID string, complParts string) (string, error) {
+func CompleteMultiPartUpload(uuid string, uploadID string, complParts []string) (string, error) {
 	_, core, err := getClients()
 	if err != nil {
 		log.Error("getClients failed:", err.Error())
@@ -138,19 +138,16 @@ func CompleteMultiPartUpload(uuid string, uploadID string, complParts string) (s
 	bucketName := minio.Bucket
 	objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/")
 
-	//complParts:{"completedParts":[{"partNumber":2,"eTag":'"684929e7fe8b996d495e7b152d34ae37"'}]}
-	var parts CompleteParts
-	err = json.Unmarshal([]byte(complParts), &parts)
-	if err != nil {
-		log.Error("json.Unmarshal(%s) failed:(%s)", complParts, err.Error())
-		return "", err
-	}
 
-	// Complete multipart upload.
 	var complMultipartUpload completeMultipartUpload
-	for _, part := range parts.Data {
+	for _, part := range complParts {
+		partNumber, err := strconv.Atoi(strings.Split(part, "-")[0])
+		if err != nil {
+			log.Error(err.Error())
+			return "", err
+		}
 		complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{
-			PartNumber: part.PartNumber,
-			ETag:       part.ETag,
+			PartNumber: partNumber,
+			ETag:       strings.Split(part, "-")[1],
 		})
 	}
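The hunk cuts off before the function returns; given that the `sort` import survives the rewrite, presumably the collected parts are still ordered by part number and handed to the minio core client, roughly as below. This is a sketch of the unshown remainder, assuming the minio-go v6 `Core.CompleteMultipartUpload` signature, not text from the patch:

```go
	// Multipart parts must be submitted in ascending part-number order.
	sort.Slice(complMultipartUpload.Parts, func(i, j int) bool {
		return complMultipartUpload.Parts[i].PartNumber < complMultipartUpload.Parts[j].PartNumber
	})

	// Returns the assembled object's etag, matching this function's (string, error).
	return core.CompleteMultipartUpload(bucketName, objectName, uploadID, complMultipartUpload.Parts)
```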
@@ -336,7 +336,7 @@ func UpdateAttachmentDecompressState(ctx *context.Context) {
 func GetSuccessChunks(ctx *context.Context) {
 	fileMD5 := ctx.Query("md5")
 
-	fileChunk, err := models.GetFileChunkByMD5(fileMD5)
+	fileChunk, err := models.GetFileChunkByMD5AndUser(fileMD5, ctx.User.ID)
 	if err != nil {
 		if models.IsErrFileChunkNotExist(err) {
 			ctx.JSON(200, map[string]string{
@@ -447,7 +447,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
 func CompleteMultipart(ctx *context.Context) {
 	uuid := ctx.Query("uuid")
 	uploadID := ctx.Query("uploadID")
-	completedParts := ctx.Query("completedParts")
 
 	fileChunk, err := models.GetFileChunkByUUID(uuid)
 	if err != nil {
@@ -459,7 +458,7 @@ func CompleteMultipart(ctx *context.Context) {
 		return
 	}
 
-	_, err = storage.CompleteMultiPartUpload(uuid, uploadID, completedParts)
+	_, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.CompletedParts)
 	if err != nil {
 		ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
 		return
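A design point worth noting: `CompleteMultipart` no longer trusts a client-posted `completedParts` payload; the part list is whatever the server itself recorded in `file_chunk.completed_parts` as each chunk was confirmed. The producer side of that record is not shown in this diff, but it would have to append entries in the same `partNumber-etag` format, along these lines (handler context and the update helper are assumptions, not part of the patch):

```go
	// Hypothetical per-chunk bookkeeping when the client reports a finished part:
	fileChunk.CompletedParts = append(fileChunk.CompletedParts,
		fmt.Sprintf("%d-%s", partNumber, etag))
	if err := models.UpdateFileChunk(fileChunk); err != nil { // update helper assumed
		ctx.Error(500, fmt.Sprintf("UpdateFileChunk failed: %v", err))
		return
	}
```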
@@ -28,7 +28,7 @@
 export default {
   data () {
     return {
-      options: {
+      /*options: {
         target: 'http://localhost:9000/upload',
         testChunks: false,
         chunkSize: 1024*1024*64, //64MB
@@ -45,9 +45,9 @@
             .replace(/\sminutes?/, '分钟')
             .replace(/\sseconds?/, '秒')
         }
-      },
+      },*/
       attrs: {
-        accept: '*'
+        accept: 'img/*'
       },
       panelShow: false, // show the upload panel once files are selected
       collapse: false,
@@ -83,7 +83,6 @@
           file.uuid = response.data.uuid;
           file.uploaded = response.data.uploaded;
           file.chunks = response.data.chunks;
-          console.log(file.chunks);
           resolve(response);
         }).catch(function (error) {
           console.log(error);
@@ -194,7 +193,6 @@
         await uploadMinio(urls[currentChunk], e);
         if (etags[currentChunk] != "") {
           // update the database with this chunk's upload result
-          console.log(etags[currentChunk]);
           await updateChunk(currentChunk);
         } else {
           return;
@@ -212,7 +210,9 @@
       axios.post('/attachments/complete_multipart', qs.stringify({
         uuid: file.uuid,
         uploadID: file.uploadID,
-        etag: etags[currentChunk],
+        file_name: file.name,
+        size: file.size,
+        //dataset_id: datasetID,
         _csrf: csrf
       })).then(function (response) {
         resolve(response);
@@ -233,8 +233,10 @@
           console.log(`第${currentChunk}个分片上传完成, 开始第${currentChunk + 1}/${chunks}个分片上传`);
           await loadNext();
         } else {
-          completeUpload();
+          //console.log(dataset_id)
+          await completeUpload();
           console.log(`文件上传完成:${file.name} \n分片:${chunks} 大小:${file.size} 用时:${(new Date().getTime() - time) / 1000} s`);
+          //window.location.reload();
         }
       };
     }
@@ -324,22 +326,18 @@ | |||||
await this.newMultiUpload(file); | await this.newMultiUpload(file); | ||||
if (file.uploadID != "" && file.uuid != "") { | if (file.uploadID != "" && file.uuid != "") { | ||||
file.chunks = ""; | file.chunks = ""; | ||||
//todo:开始分片上传:分片,获取分片上传地址,上传 | |||||
this.multipartUpload(file); | this.multipartUpload(file); | ||||
} else { | } else { | ||||
return; | return; | ||||
} | } | ||||
} else { | } else { | ||||
if (file.uploaded == "1") { //已上传成功 | if (file.uploaded == "1") { //已上传成功 | ||||
//todo:结束上传 | |||||
//秒传 | |||||
console.log("文件已上传完成"); | |||||
//window.location.reload(); | |||||
} else { | } else { | ||||
//todo:查询已上传成功的分片,重新上传未成功上传的分片 | |||||
var successChunks = new Array(); | |||||
var successParts = new Array(); | |||||
successParts = file.chunks.split(","); | |||||
for (let i = 0; i < successParts.length; i++) { | |||||
successChunks[i] = successParts[i].split("-")[0].split("\"")[1]; | |||||
} | |||||
//断点续传 | |||||
this.multipartUpload(file); | |||||
} | } | ||||
} | } | ||||
}, | }, | ||||
@@ -379,7 +377,7 @@ | |||||
<style> | <style> | ||||
.uploader-app { | .uploader-app { | ||||
width: 880px; | |||||
width: 850px; | |||||
padding: 15px; | padding: 15px; | ||||
margin: 40px auto 0; | margin: 40px auto 0; | ||||
font-size: 12px; | font-size: 12px; | ||||
@@ -3172,11 +3172,15 @@ function initVueApp() {
 }
 
 function initVueUploader() {
+  //console.log($dataset.data('dataset-id'));
   /* eslint-disable no-new */
   new Vue({
     el: '#uploader',
     components: { App },
     template: '<App/>'
+    /*props: {
+      dataset_id: $dataset.data('dataset-id')
+    }*/
   });
 }