|
<template>
  <!-- vue-simple-uploader root. autoStart is disabled so the file's MD5 can
       be computed in onFileAdded before any bytes are sent (enables
       resume / instant-upload decisions in computeMD5Success). -->
  <!-- NOTE(review): `:options="options"` has no matching `options` entry in
       data() below — the uploader will see `undefined` and fall back to its
       defaults. Confirm whether `options` is supplied elsewhere. -->
  <uploader
    ref="uploader"
    :options="options"
    :autoStart="false"
    @file-added="onFileAdded"
    @file-success="onFileSuccess"
    @file-progress="onFileProgress"
    @file-error="onFileError"
    class="uploader-app">
    <!-- Fallback message for browsers without HTML5 File API support -->
    <uploader-unsupport></uploader-unsupport>
    <uploader-drop>
      <p>拖动文件</p>
      <uploader-btn>选择文件</uploader-btn>
    </uploader-drop>
    <!-- List of files with per-file upload status -->
    <uploader-list></uploader-list>
  </uploader>
</template>
-
- <script>
-
- import SparkMD5 from 'spark-md5';
- import axios from 'axios'
- import qs from 'qs'
-
- const {AppSubUrl, StaticUrlPrefix, csrf} = window.config;
-
- export default {
- data () {
- return {
- attrs: {
- accept: '*'
- },
- props: ["datasetId"],
- panelShow: false, //选择文件后,展示上传panel
- collapse: false,
- statusTextMap: {
- success: '上传成功',
- error: '上传出错了',
- uploading: '上传中...',
- paused: '暂停',
- waiting: '等待中...',
- cmd5: '计算md5...'
- },
- fileStatusText: (status, response) => {
- return this.statusTextMap[status];
- },
- }
- },
- created() {
- //const uploaderInstance = this.$refs.uploader;
- },
- methods: {
- onFileAdded(file) {
- file.datasetId = document.getElementById("datasetId").getAttribute("datasetId");
- // 计算MD5
- this.computeMD5(file);
- },
- getSuccessChunks(file) {
- return new Promise((resolve, reject) => {
- axios.get('/attachments/get_chunks', {params :{
- md5: file.uniqueIdentifier,
- _csrf: csrf
- }}).then(function (response) {
- file.uploadID = response.data.uploadID;
- file.uuid = response.data.uuid;
- file.uploaded = response.data.uploaded;
- file.chunks = response.data.chunks;
- file.attachID = response.data.attachID;
- resolve(response);
- }).catch(function (error) {
- console.log(error);
- reject(error);
- });
- })
-
- },
- newMultiUpload(file) {
- return new Promise((resolve, reject) => {
- axios.get('/attachments/new_multipart', {params :{
- totalChunkCounts: file.totalChunkCounts,
- md5: file.uniqueIdentifier,
- size: file.size,
- fileType: file.fileType,
- _csrf: csrf
- }}).then(function (response) {
- file.uploadID = response.data.uploadID;
- file.uuid = response.data.uuid;
- resolve(response);
- }).catch(function (error) {
- console.log(error);
- reject(error);
- });
- })
- },
    // Upload `file` to object storage in 64 MiB chunks, skipping chunks the
    // server already has (listed in file.chunks, which getSuccessChunks
    // filled in and computeMD5Success may have reset to ""). The pipeline is
    // driven by FileReader: loadNext() reads a slice, the onload handler
    // (registered in upload()) uploads it, then reads the next one.
    // NOTE(review): the inner helpers share the closure variables
    // currentChunk / urls / etags, so statement order here is significant.
    multipartUpload(file) {
      let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 1024*1024*64, // 64 MiB per chunk
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        fileReader = new FileReader(),
        time = new Date().getTime(); // start timestamp, for the final timing log

      // Read the next chunk of the underlying File; completion resumes the
      // pipeline via fileReader.onload (set in upload()).
      function loadNext() {
        let start = currentChunk * chunkSize;
        let end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

        fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end));
      }

      // True when the server already has the current chunk (1-based number
      // found in successChunks, parsed below).
      function checkSuccessChunks() {
        var index = successChunks.indexOf((currentChunk+1).toString())
        if (index == -1) {
          return false;
        }

        return true;
      }

      // Fetch a pre-signed upload URL for one chunk and stash it in urls[].
      function getUploadChunkUrl(currentChunk, partSize) {
        return new Promise((resolve, reject) => {
          axios.get('/attachments/get_multipart_url', {params :{
            uuid: file.uuid,
            uploadID: file.uploadID,
            size: partSize,
            chunkNumber: currentChunk+1, // server-side chunk numbers are 1-based
            _csrf: csrf
          }}).then(function (response) {
            urls[currentChunk] = response.data.url
            resolve(response);
          }).catch(function (error) {
            console.log(error);
            reject(error);
          });
        })
      }

      // PUT the chunk bytes to the pre-signed URL (minio) and record the
      // returned ETag, needed later to complete the multipart upload.
      function uploadMinio(url, e) {
        return new Promise((resolve, reject) => {

          axios.put(url, e.target.result
          ).then(function (res) {
            etags[currentChunk] = res.headers.etag;
            resolve(res);
          }).catch(function (err) {
            console.log(err);
            reject(err);
          });
        });
      }

      // Tell our server that this chunk was stored, with its ETag.
      function updateChunk(currentChunk) {
        return new Promise((resolve, reject) => {
          axios.post('/attachments/update_chunk', qs.stringify({
            uuid: file.uuid,
            chunkNumber: currentChunk+1,
            etag: etags[currentChunk],
            _csrf: csrf
          })).then(function (response) {
            resolve(response);
          }).catch(function (error) {
            console.log(error);
            reject(error);
          });
        })
      }

      // Upload the current chunk unless the server already has it:
      // get URL -> PUT to minio -> record result. Bails out silently if the
      // URL or ETag comes back empty.
      async function uploadChunk(e) {
        if (!checkSuccessChunks()) {
          let start = currentChunk * chunkSize;
          let partSize = ((start + chunkSize) >= file.size) ? file.size -start : chunkSize;

          // get the pre-signed upload URL for this chunk
          await getUploadChunkUrl(currentChunk, partSize);
          if (urls[currentChunk] != "") {
            // upload the chunk bytes to minio
            await uploadMinio(urls[currentChunk], e);
            if (etags[currentChunk] != "") {
              // record the chunk result (ETag) in the database
              await updateChunk(currentChunk);
            } else {
              return;
            }
          } else {
            return;
          }

        }

      };

      // Finalize the multipart upload server-side and attach the file to
      // the dataset.
      function completeUpload(){
        return new Promise((resolve, reject) => {
          axios.post('/attachments/complete_multipart', qs.stringify({
            uuid: file.uuid,
            uploadID: file.uploadID,
            file_name: file.name,
            size: file.size,
            dataset_id: file.datasetId,
            _csrf: csrf
          })).then(function (response) {
            resolve(response);
          }).catch(function (error) {
            console.log(error);
            reject(error);
          });
        })
      }

      // Drive the read->upload loop. The onload handler is installed after
      // the first read starts; that is safe because FileReader fires
      // asynchronously, but do not reorder casually.
      function upload() {
        loadNext();
        fileReader.onload = async (e) => {
          await uploadChunk(e);
          currentChunk++;

          if (currentChunk < chunks) {
            console.log(`第${currentChunk}个分片上传完成, 开始第${currentChunk +1}/${chunks}个分片上传`);
            await loadNext();
          } else {
            await completeUpload();
            console.log(`文件上传完成:${file.name} \n分片:${chunks} 大小:${file.size} 用时:${(new Date().getTime() - time)/1000} s`);
            //window.location.reload();
          }
        };
      }

      // Parse the already-uploaded chunk numbers out of file.chunks.
      // NOTE(review): assumes entries look like `"N-etag"` (quoted, with the
      // chunk number before the first dash) — confirm against the
      // get_chunks response format.
      var successChunks = new Array();
      var successParts = new Array();
      successParts = file.chunks.split(",");
      for (let i = 0; i < successParts.length; i++) {
        successChunks[i] = successParts[i].split("-")[0].split("\"")[1];
      }

      var urls = new Array();   // per-chunk pre-signed URLs
      var etags = new Array();  // per-chunk ETags returned by minio

      console.log('上传分片...');

      upload();

    },
- chkMd5(file) {
- let time = new Date().getTime();
- let fileReader = new FileReader();
- let spark = new SparkMD5(); //创建md5对象(基于SparkMD5)
- fileReader.readAsBinaryString(file.file);
- console.log('开始计算MD5...')
- //文件读取完毕之后的处理
- fileReader.onload = (e) => {
- spark.appendBinary(e.target.result);
- let md5 = spark.end();
- console.log(`MD5计算完成:${file.name} \nMD5:${md5} \n用时:${new Date().getTime() - time} ms`);
- spark.destroy();
- };
- },
- //计算MD5
    // Compute the file's MD5 incrementally, 64 MiB at a time, so huge files
    // never sit in memory whole. While hashing, file.cmd5 is true (drives
    // the "计算md5..." status text). On completion the digest becomes
    // file.uniqueIdentifier and computeMD5Success decides how to upload.
    computeMD5(file) {
      let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 1024*1024*64, // 64 MiB per read — must match multipartUpload's chunk size
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5.ArrayBuffer(), // incremental MD5 accumulator
        fileReader = new FileReader();

      let time = new Date().getTime();

      console.log('计算MD5...')
      file.cmd5 = true;                 // flag: MD5 computation in progress
      file.totalChunkCounts = chunks;   // consumed later by newMultiUpload
      loadNext();

      // Arrow function so `this` stays the component for computeMD5Success.
      fileReader.onload = (e) => {
        spark.append(e.target.result); // fold this chunk into the digest
        currentChunk++;

        if (currentChunk < chunks) {
          console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`);
          // let percent = Math.floor(currentChunk / chunks * 100);
          // console.log(percent);
          // file.cmd5progress = percent;
          loadNext();
        } else {
          let md5 = spark.end();
          console.log(`MD5计算完成:${file.name} \nMD5:${md5} \n分片:${chunks} 大小:${file.size} 用时:${(new Date().getTime() - time)/1000} s`);
          spark.destroy(); // release the hash buffers
          file.uniqueIdentifier = md5; // the MD5 becomes the file's unique id
          file.cmd5 = false; // hashing finished

          this.computeMD5Success(file);
        }
      };

      fileReader.onerror = () => {
        console.warn('oops, something went wrong.');
        file.cancel();
      };

      // Read the next 64 MiB slice; onload above continues the loop.
      function loadNext() {
        let start = currentChunk * chunkSize;
        let end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

        fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end));
      }
    },
- async computeMD5Success(file) {
- await this.getSuccessChunks(file);
-
- if (file.uploadID == "" || file.uuid == "") { //未上传过
- await this.newMultiUpload(file);
- if (file.uploadID != "" && file.uuid != "") {
- file.chunks = "";
- this.multipartUpload(file);
- } else {
- return;
- }
- } else {
- if (file.uploaded == "1") { //已上传成功
- //秒传
- if (file.attachID == "0") { //删除数据集记录,未删除文件
- await addAttachment(file);
- }
- console.log("文件已上传完成");
- //window.location.reload();
- } else {
- //断点续传
- this.multipartUpload(file);
- }
- }
-
- function addAttachment(file){
- return new Promise((resolve, reject) => {
- axios.post('/attachments/add', qs.stringify({
- uuid: file.uuid,
- file_name: file.name,
- size: file.size,
- dataset_id: file.datasetId,
- _csrf: csrf
- })).then(function (response) {
- resolve(response);
- }).catch(function (error) {
- console.log(error);
- reject(error);
- });
- })
- }
- },
- // 文件进度的回调
- onFileProgress(rootFile, file, chunk) {
- console.log(`上传中 ${file.name},chunk:${chunk.startByte / 1024 / 1024} ~ ${chunk.endByte / 1024 / 1024}`)
- },
- onFileSuccess(rootFile, file, response, chunk) {
- let resp = JSON.parse(response);
- if (resp.code === 0 && resp.merge === false) {
- console.log('上传成功,不需要合并');
- } else {
- axios.post('http://localhost:9999/up.php?action=merge', {
- filename: file.name,
- identifier: file.uniqueIdentifier,
- totalSize: file.size,
- totalChunks: chunk.offset + 1
- }).then(function(res){
- if (res.code === 0) {
- console.log('上传成功')
- } else {
- console.log(res.message);
- }
- })
- .catch(function(error){
- console.log(error);
- });
- }
- },
-
- onFileError(rootFile, file, response, chunk) {
- console.log('Error:', response)
- },
- }
- }
- </script>
-
- <style>
/* Container panel for the uploader widget. */
.uploader-app {
  width: 850px;
  padding: 15px;
  margin: 40px auto 0;
  font-size: 12px;
  box-shadow: 0 0 10px rgba(0, 0, 0, .4);
}
.uploader-app .uploader-btn {
  margin-right: 40px;
}
/* File list scrolls vertically only. The original also set the shorthand
   `overflow: auto`, which the axis-specific rules below fully override —
   removed as redundant. */
.uploader-app .uploader-list {
  max-height: 440px;
  overflow-x: hidden;
  overflow-y: auto;
}
- </style>
|