租房小程序前端代码
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

361 lines
10 KiB

3 months ago
  1. // var debug = require('debug')('ali-oss:multipart');
  2. const util = require('util');
  3. const path = require('path');
  4. const mime = require('mime');
  5. const copy = require('copy-to');
  6. const { isBlob } = require('../common/utils/isBlob');
  7. const { isFile } = require('../common/utils/isFile');
  8. const { isBuffer } = require('../common/utils/isBuffer');
  9. const proto = exports;
  10. /**
  11. * Multipart operations
  12. */
  13. /**
  14. * Upload a file to OSS using multipart uploads
  15. * @param {String} name
  16. * @param {String|File|Buffer} file
  17. * @param {Object} options
  18. * {Object} [options.callback] The callback parameter is composed of a JSON string encoded in Base64
  19. * {String} options.callback.url the OSS sends a callback request to this URL
  20. * {String} [options.callback.host] The host header value for initiating callback requests
  21. * {String} options.callback.body The value of the request body when a callback is initiated
  22. * {String} [options.callback.contentType] The Content-Type of the callback requests initiated
  23. * {Boolean} [options.callback.callbackSNI] Whether OSS sends SNI to the origin address specified by callbackUrl when a callback request is initiated from the client
  24. * {Object} [options.callback.customValue] Custom parameters are a map of key-values, e.g:
  25. * customValue = {
  26. * key1: 'value1',
  27. * key2: 'value2'
  28. * }
  29. */
  30. proto.multipartUpload = async function multipartUpload(name, file, options = {}) {
  31. this.resetCancelFlag();
  32. options.disabledMD5 = options.disabledMD5 === undefined ? true : !!options.disabledMD5;
  33. if (options.checkpoint && options.checkpoint.uploadId) {
  34. if (file && isFile(file)) options.checkpoint.file = file;
  35. return await this._resumeMultipart(options.checkpoint, options);
  36. }
  37. const minPartSize = 100 * 1024;
  38. if (!options.mime) {
  39. if (isFile(file)) {
  40. options.mime = mime.getType(path.extname(file.name));
  41. } else if (isBlob(file)) {
  42. options.mime = file.type;
  43. } else if (isBuffer(file)) {
  44. options.mime = '';
  45. } else {
  46. options.mime = mime.getType(path.extname(file));
  47. }
  48. }
  49. options.headers = options.headers || {};
  50. this._convertMetaToHeaders(options.meta, options.headers);
  51. const fileSize = await this._getFileSize(file);
  52. if (fileSize < minPartSize) {
  53. options.contentLength = fileSize;
  54. const result = await this.put(name, file, options);
  55. if (options && options.progress) {
  56. await options.progress(1);
  57. }
  58. const ret = {
  59. res: result.res,
  60. bucket: this.options.bucket,
  61. name,
  62. etag: result.res.headers.etag
  63. };
  64. if ((options.headers && options.headers['x-oss-callback']) || options.callback) {
  65. ret.data = result.data;
  66. }
  67. return ret;
  68. }
  69. if (options.partSize && !(parseInt(options.partSize, 10) === options.partSize)) {
  70. throw new Error('partSize must be int number');
  71. }
  72. if (options.partSize && options.partSize < minPartSize) {
  73. throw new Error(`partSize must not be smaller than ${minPartSize}`);
  74. }
  75. const initResult = await this.initMultipartUpload(name, options);
  76. const { uploadId } = initResult;
  77. const partSize = this._getPartSize(fileSize, options.partSize);
  78. const checkpoint = {
  79. file,
  80. name,
  81. fileSize,
  82. partSize,
  83. uploadId,
  84. doneParts: []
  85. };
  86. if (options && options.progress) {
  87. await options.progress(0, checkpoint, initResult.res);
  88. }
  89. return await this._resumeMultipart(checkpoint, options);
  90. };
  91. /*
  92. * Resume multipart upload from checkpoint. The checkpoint will be
  93. * updated after each successful part upload.
  94. * @param {Object} checkpoint the checkpoint
  95. * @param {Object} options
  96. */
  97. proto._resumeMultipart = async function _resumeMultipart(checkpoint, options) {
  98. const that = this;
  99. if (this.isCancel()) {
  100. throw this._makeCancelEvent();
  101. }
  102. const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint;
  103. const internalDoneParts = [];
  104. if (doneParts.length > 0) {
  105. copy(doneParts).to(internalDoneParts);
  106. }
  107. const partOffs = this._divideParts(fileSize, partSize);
  108. const numParts = partOffs.length;
  109. let multipartFinish = false;
  110. let uploadPartJob = (self, partNo) => {
  111. // eslint-disable-next-line no-async-promise-executor
  112. return new Promise(async (resolve, reject) => {
  113. try {
  114. if (!self.isCancel()) {
  115. const pi = partOffs[partNo - 1];
  116. const content = await self._createBuffer(file, pi.start, pi.end);
  117. const data = {
  118. content,
  119. size: pi.end - pi.start
  120. };
  121. let result;
  122. try {
  123. result = await self._uploadPart(name, uploadId, partNo, data, options);
  124. } catch (error) {
  125. if (error.status === 404) {
  126. throw self._makeAbortEvent();
  127. }
  128. throw error;
  129. }
  130. if (!self.isCancel() && !multipartFinish) {
  131. checkpoint.doneParts.push({
  132. number: partNo,
  133. etag: result.res.headers.etag
  134. });
  135. if (options.progress) {
  136. await options.progress(doneParts.length / (numParts + 1), checkpoint, result.res);
  137. }
  138. resolve({
  139. number: partNo,
  140. etag: result.res.headers.etag
  141. });
  142. } else {
  143. resolve();
  144. }
  145. } else {
  146. resolve();
  147. }
  148. } catch (err) {
  149. const tempErr = new Error();
  150. tempErr.name = err.name;
  151. tempErr.message = err.message;
  152. tempErr.stack = err.stack;
  153. tempErr.partNum = partNo;
  154. copy(err).to(tempErr);
  155. reject(tempErr);
  156. }
  157. });
  158. };
  159. const all = Array.from(new Array(numParts), (x, i) => i + 1);
  160. const done = internalDoneParts.map(p => p.number);
  161. const todo = all.filter(p => done.indexOf(p) < 0);
  162. const defaultParallel = 5;
  163. const parallel = options.parallel || defaultParallel;
  164. // upload in parallel
  165. const jobErr = await this._parallel(todo, parallel, value => {
  166. return new Promise((resolve, reject) => {
  167. uploadPartJob(that, value)
  168. .then(result => {
  169. if (result) {
  170. internalDoneParts.push(result);
  171. }
  172. resolve();
  173. })
  174. .catch(err => {
  175. reject(err);
  176. });
  177. });
  178. });
  179. multipartFinish = true;
  180. const abortEvent = jobErr.find(err => err.name === 'abort');
  181. if (abortEvent) throw abortEvent;
  182. if (this.isCancel()) {
  183. uploadPartJob = null;
  184. throw this._makeCancelEvent();
  185. }
  186. if (jobErr && jobErr.length > 0) {
  187. jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${
  188. jobErr[0].partNum
  189. }`;
  190. throw jobErr[0];
  191. }
  192. return await this.completeMultipartUpload(name, uploadId, internalDoneParts, options);
  193. };
  194. /**
  195. * Get file size
  196. */
  197. proto._getFileSize = async function _getFileSize(file) {
  198. if (isBuffer(file)) {
  199. return file.length;
  200. } else if (isBlob(file) || isFile(file)) {
  201. return file.size;
  202. }
  203. throw new Error('_getFileSize requires Buffer/File/Blob.');
  204. };
  205. /*
  206. * Readable stream for Web File
  207. */
  208. const { Readable } = require('stream');
// Readable stream over a browser File/Blob, backed by FileReader.
// ES5-style constructor + util.inherits (pre-class idiom kept as-is).
function WebFileReadStream(file, options) {
  // Guard so the constructor also works when called without `new`.
  if (!(this instanceof WebFileReadStream)) {
    return new WebFileReadStream(file, options);
  }
  Readable.call(this, options);
  this.file = file; // Web File/Blob still to be read; cleared once loaded
  this.reader = new FileReader(); // browser FileReader used to pull the bytes
  this.start = 0; // next read offset into fileBuffer
  this.finish = false; // true once the whole file has been pushed downstream
  this.fileBuffer = null; // Buffer holding the entire file after onload fires
}
util.inherits(WebFileReadStream, Readable);
  221. WebFileReadStream.prototype.readFileAndPush = function readFileAndPush(size) {
  222. if (this.fileBuffer) {
  223. let pushRet = true;
  224. while (pushRet && this.fileBuffer && this.start < this.fileBuffer.length) {
  225. const { start } = this;
  226. let end = start + size;
  227. end = end > this.fileBuffer.length ? this.fileBuffer.length : end;
  228. this.start = end;
  229. pushRet = this.push(this.fileBuffer.slice(start, end));
  230. }
  231. }
  232. };
// Readable._read implementation: lazily loads the whole File into memory
// on the first call, then streams it out in `size`-byte chunks.
WebFileReadStream.prototype._read = function _read(size) {
  if (
    (this.file && this.start >= this.file.size) ||
    (this.fileBuffer && this.start >= this.fileBuffer.length) ||
    this.finish ||
    (this.start === 0 && !this.file)
  ) {
    // Everything has been emitted (or there was nothing to emit):
    // drop the buffer and signal end-of-stream.
    if (!this.finish) {
      this.fileBuffer = null;
      this.finish = true;
    }
    this.push(null);
    return;
  }
  const defaultReadSize = 16 * 1024;
  size = size || defaultReadSize;
  const that = this;
  // When the FileReader finishes, keep the bytes as a Buffer and release
  // the File reference, then start pushing chunks.
  this.reader.onload = function onload(e) {
    that.fileBuffer = Buffer.from(new Uint8Array(e.target.result));
    that.file = null;
    that.readFileAndPush(size);
  };
  if (this.start === 0) {
    // First call: kick off the asynchronous whole-file read.
    this.reader.readAsArrayBuffer(this.file);
  } else {
    // Later calls: the file is already buffered, push more of it.
    this.readFileAndPush(size);
  }
};
  261. function getBuffer(file) {
  262. // Some browsers do not support Blob.prototype.arrayBuffer, such as IE
  263. if (file.arrayBuffer) return file.arrayBuffer();
  264. return new Promise((resolve, reject) => {
  265. const reader = new FileReader();
  266. reader.onload = function (e) {
  267. resolve(e.target.result);
  268. };
  269. reader.onerror = function (e) {
  270. reject(e);
  271. };
  272. reader.readAsArrayBuffer(file);
  273. });
  274. }
  275. proto._createBuffer = async function _createBuffer(file, start, end) {
  276. if (isBlob(file) || isFile(file)) {
  277. const _file = file.slice(start, end);
  278. const fileContent = await getBuffer(_file);
  279. return Buffer.from(fileContent);
  280. } else if (isBuffer(file)) {
  281. return file.subarray(start, end);
  282. } else {
  283. throw new Error('_createBuffer requires File/Blob/Buffer.');
  284. }
  285. };
  286. proto._getPartSize = function _getPartSize(fileSize, partSize) {
  287. const maxNumParts = 10 * 1000;
  288. const defaultPartSize = 1 * 1024 * 1024;
  289. if (!partSize) partSize = defaultPartSize;
  290. const safeSize = Math.ceil(fileSize / maxNumParts);
  291. if (partSize < safeSize) {
  292. partSize = safeSize;
  293. console.warn(
  294. `partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000`
  295. );
  296. }
  297. return partSize;
  298. };
  299. proto._divideParts = function _divideParts(fileSize, partSize) {
  300. const numParts = Math.ceil(fileSize / partSize);
  301. const partOffs = [];
  302. for (let i = 0; i < numParts; i++) {
  303. const start = partSize * i;
  304. const end = Math.min(start + partSize, fileSize);
  305. partOffs.push({
  306. start,
  307. end
  308. });
  309. }
  310. return partOffs;
  311. };