
MinioUploader.vue 11 kB

<template>
  <div class="dropzone-wrapper">
    <div id="dropzone" class="dropzone"></div>
    <p>{{ file_status_text }} {{ status }}</p>
  </div>
</template>
<script>
// import Dropzone from 'dropzone/dist/dropzone.js';
// import 'dropzone/dist/dropzone.css'
import createDropzone from '../features/dropzone.js';
import SparkMD5 from 'spark-md5';
import axios from 'axios';
import qs from 'qs';

const {
  AppSubUrl,
  StaticUrlPrefix,
  csrf
} = window.config;
export default {
  data() {
    return {
      dropzoneUploader: null,
      maxFiles: 1,
      maxFilesize: 1 * 1024 * 1024 * 1024 * 1024,
      acceptedFiles: '*/*',
      progress: 0,
      status: '',
      dropzoneParams: {},
      file_status_text: ''
    };
  },
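  // Localized messages and the initial status text come from the data attributes
  // of the server-rendered `div#minioUploader-params` element; `mounted()` below
  // reads them and wires up the Dropzone instance.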
  async mounted() {
    this.dropzoneParams = $('div#minioUploader-params');
    this.file_status_text = this.dropzoneParams.data('file-status');
    this.status = this.dropzoneParams.data('file-init-status');
    const $dropzone = $('div#dropzone');
    const dropzoneUploader = await createDropzone($dropzone[0], {
      url: '/todouploader',
      maxFiles: this.maxFiles,
      maxFilesize: this.maxFilesize,
      timeout: 0,
      autoQueue: false,
      dictDefaultMessage: this.dropzoneParams.data('default-message'),
      dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'),
      dictFileTooBig: this.dropzoneParams.data('file-too-big'),
      dictRemoveFile: this.dropzoneParams.data('remove-file')
    });
    dropzoneUploader.on('addedfile', (file) => {
      setTimeout(() => {
        file.accepted && this.onFileAdded(file);
      }, 200);
    });
    const dropzoneParams = this.dropzoneParams;
    dropzoneUploader.on('maxfilesexceeded', function (file) {
      // Inside this handler `this` is the Dropzone instance, not the Vue
      // component, so the params element is captured in a closure variable above.
      if (this.files[0].status !== 'success') {
        alert(dropzoneParams.data('waitting-uploading'));
        this.removeFile(file);
        return;
      }
      this.removeAllFiles();
      this.addFile(file);
    });
    this.dropzoneUploader = dropzoneUploader;
  },
  methods: {
    resetStatus() {
      this.progress = 0;
      this.status = '';
    },
    updateProgress(file, progress) {
      file.previewTemplate.querySelector('.dz-upload').style.width = `${progress}%`;
    },
    emitDropzoneSuccess(file) {
      file.status = 'success';
      this.dropzoneUploader.emit('success', file);
      this.dropzoneUploader.emit('complete', file);
    },
    onFileAdded(file) {
      file.datasetId = document.getElementById('datasetId').getAttribute('datasetId');
      this.resetStatus();
      this.computeMD5(file);
    },
    finishUpload(file) {
      this.emitDropzoneSuccess(file);
      setTimeout(() => {
        window.location.reload();
      }, 1000);
    },
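    // Compute the file's MD5 incrementally with SparkMD5, reading the file in
    // 64 MiB slices through a FileReader so large files never have to be held
    // in memory at once. The digest becomes the file's unique identifier on the
    // server side.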
    computeMD5(file) {
      this.resetStatus();
      let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 1024 * 1024 * 64,
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5.ArrayBuffer(),
        fileReader = new FileReader();
      let time = new Date().getTime();
      // console.log('Computing MD5...')
      this.status = this.dropzoneParams.data('md5-computing');
      file.totalChunkCounts = chunks;
      loadNext();
      fileReader.onload = (e) => {
        fileLoaded.call(this, e);
      };
      fileReader.onerror = (err) => {
        console.warn('oops, something went wrong.', err);
        file.cancel();
      };
      function fileLoaded(e) {
        spark.append(e.target.result); // Append array buffer
        currentChunk++;
        if (currentChunk < chunks) {
          // console.log(`Chunk ${currentChunk} parsed, starting chunk ${currentChunk + 1}/${chunks}`);
          this.status = `${this.dropzoneParams.data('loading-file')} ${((currentChunk / chunks) * 100).toFixed(2)}% (${currentChunk}/${chunks})`;
          this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
          loadNext();
          return;
        }
        let md5 = spark.end();
        console.log(
          `MD5 computed: ${file.name} \nMD5: ${md5} \nchunks: ${chunks} size: ${file.size} time: ${(new Date().getTime() - time) / 1000} s`
        );
        spark.destroy(); // Release the internal buffer
        file.uniqueIdentifier = md5; // Use the MD5 as the file's unique identifier
        file.cmd5 = false; // MD5 computation is no longer in progress
        this.computeMD5Success(file);
      }
      function loadNext() {
        let start = currentChunk * chunkSize;
        let end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
      }
    },
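    // Decide how to proceed once the MD5 is known, based on what the server
    // already has for this digest: start a fresh multipart upload, resume an
    // interrupted one, or skip the upload entirely ("instant upload") when the
    // file is already stored and only the attachment record is missing.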
    async computeMD5Success(md5edFile) {
      const file = await this.getSuccessChunks(md5edFile);
      if (file.uploadID == '' || file.uuid == '') {
        // Never uploaded before: start a new multipart upload
        await this.newMultiUpload(file);
        if (file.uploadID != '' && file.uuid != '') {
          file.chunks = '';
          this.multipartUpload(file);
        } else {
          // TODO: handle failure to create the multipart upload
          return;
        }
        return;
      }
      if (file.uploaded == '1') {
        // Already uploaded successfully: "instant upload"
        if (file.attachID == '0') {
          // The dataset record was deleted but the stored file remains; re-attach it
          await addAttachment(file);
        }
        console.log('File already uploaded');
        this.progress = 100;
        this.status = this.dropzoneParams.data('upload-complete');
        this.finishUpload(file);
      } else {
        // Resume the interrupted upload
        this.multipartUpload(file);
      }
      async function addAttachment(file) {
        return await axios.post('/attachments/add', qs.stringify({
          uuid: file.uuid,
          file_name: file.name,
          size: file.size,
          dataset_id: file.datasetId,
          _csrf: csrf
        }));
      }
    },
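    // Ask the server which chunks of this file (looked up by MD5) have already
    // been uploaded, along with any existing uploadID, uuid and attachment record.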
    async getSuccessChunks(file) {
      const params = {
        params: {
          md5: file.uniqueIdentifier,
          _csrf: csrf
        }
      };
      try {
        const response = await axios.get('/attachments/get_chunks', params);
        file.uploadID = response.data.uploadID;
        file.uuid = response.data.uuid;
        file.uploaded = response.data.uploaded;
        file.chunks = response.data.chunks;
        file.attachID = response.data.attachID;
        return file;
      } catch (error) {
        console.log('getSuccessChunks catch: ', error);
        return null;
      }
    },
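    // Register a new multipart upload on the server; the response carries the
    // uploadID and uuid used by all subsequent per-chunk requests.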
    async newMultiUpload(file) {
      const res = await axios.get('/attachments/new_multipart', {
        params: {
          totalChunkCounts: file.totalChunkCounts,
          md5: file.uniqueIdentifier,
          size: file.size,
          fileType: file.fileType,
          _csrf: csrf
        }
      });
      file.uploadID = res.data.uploadID;
      file.uuid = res.data.uuid;
    },
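    // Upload the file chunk by chunk (64 MiB each): skip chunks the server has
    // already confirmed, fetch a presigned URL for each remaining chunk, PUT it
    // to MinIO, record the returned ETag, and finally ask the server to complete
    // the multipart upload.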
    multipartUpload(file) {
      let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 1024 * 1024 * 64,
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        fileReader = new FileReader(),
        time = new Date().getTime();
      function loadNext() {
        let start = currentChunk * chunkSize;
        let end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
      }
      function checkSuccessChunks() {
        var index = successChunks.indexOf((currentChunk + 1).toString());
        if (index == -1) {
          return false;
        }
        return true;
      }
      async function getUploadChunkUrl(currentChunk, partSize) {
        const res = await axios.get('/attachments/get_multipart_url', {
          params: {
            uuid: file.uuid,
            uploadID: file.uploadID,
            size: partSize,
            chunkNumber: currentChunk + 1,
            _csrf: csrf
          }
        });
        console.log('getUploadChunkUrl: ', res);
        urls[currentChunk] = res.data.url;
      }
      async function uploadMinio(url, e) {
        const res = await axios.put(url, e.target.result);
        etags[currentChunk] = res.headers.etag;
      }
      async function updateChunk(currentChunk) {
        await axios.post('/attachments/update_chunk', qs.stringify({
          uuid: file.uuid,
          chunkNumber: currentChunk + 1,
          etag: etags[currentChunk],
          _csrf: csrf
        }));
      }
      async function uploadChunk(e) {
        if (!checkSuccessChunks()) {
          let start = currentChunk * chunkSize;
          let partSize = ((start + chunkSize) >= file.size) ? file.size - start : chunkSize;
          // Get a presigned upload URL for this chunk
          await getUploadChunkUrl(currentChunk, partSize);
          if (urls[currentChunk] != '') {
            // Upload the chunk to MinIO
            await uploadMinio(urls[currentChunk], e);
            if (etags[currentChunk] != '') {
              // Record the chunk's upload result in the database
              await updateChunk(currentChunk);
            } else {
              return;
            }
          } else {
            return;
          }
        }
      }
      async function completeUpload() {
        return await axios.post('/attachments/complete_multipart', qs.stringify({
          uuid: file.uuid,
          uploadID: file.uploadID,
          file_name: file.name,
          size: file.size,
          dataset_id: file.datasetId,
          _csrf: csrf
        }));
      }
      var successChunks = new Array();
      var successParts = new Array();
      successParts = file.chunks.split(',');
      for (let i = 0; i < successParts.length; i++) {
        successChunks[i] = successParts[i].split('-')[0].split('"')[1];
      }
      var urls = new Array();
      var etags = new Array();
      console.log('Uploading chunks...');
      this.status = this.dropzoneParams.data('uploading');
      loadNext();
      fileReader.onload = async (e) => {
        await uploadChunk(e);
        currentChunk++;
        if (currentChunk < chunks) {
          console.log(`Chunk ${currentChunk} uploaded, starting chunk ${currentChunk + 1}/${chunks}`);
          this.progress = Math.ceil((currentChunk / chunks) * 100);
          this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
          this.status = `${this.dropzoneParams.data('uploading')} ${((currentChunk / chunks) * 100).toFixed(2)}%`;
          loadNext();
        } else {
          await completeUpload();
          console.log(`Upload finished: ${file.name} \nchunks: ${chunks} size: ${file.size} time: ${(new Date().getTime() - time) / 1000} s`);
          this.progress = 100;
          this.status = this.dropzoneParams.data('upload-complete');
          this.finishUpload(file);
        }
      };
    }
  }
};
</script>
<style>
.dropzone-wrapper {
  margin: 2em auto;
}
.ui .dropzone {
  border: 2px dashed #0087f5;
  box-shadow: none !important;
  padding: 0;
  min-height: 5rem;
  border-radius: 4px;
}
.dataset .dataset-files #dataset .dz-preview.dz-file-preview,
.dataset .dataset-files #dataset .dz-preview.dz-processing {
  display: flex;
  align-items: center;
}
.dataset .dataset-files #dataset .dz-preview {
  border-bottom: 1px solid #dadce0;
  min-height: 0;
}
</style>