// Shared S3 client, created by init(); every upload helper below uses it.
let bucket;
// Target bucket name (hard-coded — consider making this configurable).
let bucketname = "ccrb";
// S3 multipart uploads require every part except the last to be >= 5 MiB.
const partsize = 5 * 1024 * 1024;
/**
 * Configure the global AWS SDK and create the shared S3 client.
 *
 * NOTE(review): `init` is declared twice in this file; function declarations
 * hoist, so the later definition wins at runtime — consolidate to one.
 *
 * FIXME(security): AWS credentials are hard-coded in client-side source and
 * must be treated as compromised. Rotate these keys and obtain credentials
 * from a secure source (e.g. Cognito identity pool / STS) instead of
 * shipping them to the browser.
 *
 * @returns {object} the shared S3 client (also stored in module-level `bucket`).
 */
async function init() {
  const credentials = {
    accessKeyId: "AKIATLPEDU37QV5CHLMH",
    secretAccessKey: "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",
  };
  window.AWS.config.update(credentials);
  window.AWS.config.region = "cn-northwest-1";

  bucket = new window.AWS.S3({
    params: {
      Bucket: bucketname,
    },
  });
  return bucket;
}
/**
 * Start a multipart upload for `key` and return its UploadId.
 *
 * @param {string} key - object key to create.
 * @param {File} file - source file; only its MIME type is used here.
 * @returns {Promise<string>} the new upload's UploadId.
 */
async function initMultipartUpload(key, file) {
  const createParams = {
    Bucket: bucketname,
    Key: key,
    ContentType: file.type,
    ACL: "public-read",
  };
  const { UploadId } = await bucket.createMultipartUpload(createParams).promise();
  return UploadId;
}
/**
 * Upload one slice of `file` as part `pn` of an in-progress multipart upload.
 *
 * @param {File} file - source file; bytes [start, end) are sent.
 * @param {string} keyname - object key of the upload.
 * @param {string} uploadid - UploadId returned by createMultipartUpload.
 * @param {number} pn - 1-based S3 part number.
 * @param {number} start - slice start offset (inclusive).
 * @param {number} end - slice end offset (exclusive).
 * @returns {Promise<{ETag: string, PartNumber: number}>} entry for CompleteMultipartUpload.
 */
async function uploadPart(file, keyname, uploadid, pn, start, end) {
  const body = file.slice(start, end);
  const response = await bucket
    .uploadPart({
      Bucket: bucketname,
      Key: keyname,
      PartNumber: pn,
      UploadId: uploadid,
      Body: body,
    })
    .promise();
  return { ETag: response.ETag, PartNumber: pn };
}
/**
 * Finish a multipart upload by handing S3 the full list of uploaded parts.
 *
 * @param {Array<{ETag: string, PartNumber: number}>} parts - all parts, in order.
 * @param {string} keyname - object key of the upload.
 * @param {string} uploadid - UploadId of the upload to complete.
 * @returns {Promise<object>} the CompleteMultipartUpload response.
 */
async function completeMultipartUpload(parts, keyname, uploadid) {
  const completeParams = {
    Bucket: bucketname,
    Key: keyname,
    MultipartUpload: { Parts: parts },
    UploadId: uploadid,
  };
  const result = await bucket.completeMultipartUpload(completeParams).promise();
  return result;
}
/**
 * Abort an in-progress multipart upload so S3 discards its stored parts.
 *
 * @param {string} key - object key of the upload.
 * @param {string} uploadid - UploadId of the upload to abort.
 * @returns {Promise<void>}
 */
async function abortMultipartUpload(key, uploadid) {
  await bucket
    .abortMultipartUpload({
      Bucket: bucketname,
      Key: key,
      UploadId: uploadid,
    })
    .promise();
}
/**
 * Upload `file` to S3 under `folderid/filename` via multipart upload.
 * Aborts the upload on failure so orphaned parts are not left behind.
 *
 * @param {File} file - file to upload.
 * @param {string} [folderid] - key prefix; a new GUID is generated when omitted.
 * @returns {Promise<string|undefined>} the UploadId on success, undefined on failure.
 */
async function uploadFile(file, folderid) {
  folderid = folderid || window.Guid.newGuid();
  var keyname = folderid + "/" + file.name;
  var uploadid = "";
  try {
    // BUG FIX: init() is async and was fired without await.
    await init();

    uploadid = await initMultipartUpload(keyname, file);

    const parts = [];
    // BUG FIX: was `file.length`, which is undefined on File/Blob (their byte
    // count is `size`), so len was NaN and the loop never ran.
    const len = Math.ceil(file.size / partsize);

    for (let i = 0; i < len; i++) {
      const start = i * partsize;
      const end = (i + 1) * partsize;
      // BUG FIX: S3 part numbers are 1-based; passing `i` sent an invalid
      // PartNumber 0 for the first part.
      parts.push(await uploadPart(file, keyname, uploadid, i + 1, start, end));
    }

    await completeMultipartUpload(parts, keyname, uploadid);
    return uploadid;
  }
  catch (error) {
    console.error('An error occurred during file upload:', error);
    // Only abort if the upload was actually created.
    if (uploadid) {
      await abortMultipartUpload(keyname, uploadid);
    }
  }
}
/**
 * Configure the global AWS SDK and create the shared S3 client.
 *
 * NOTE(review): this is the second declaration of `init` in this file; because
 * function declarations hoist, THIS definition is the one that runs. The two
 * copies differ only in how the region is set — delete one of them.
 *
 * FIXME(security): AWS credentials are hard-coded in client-side source and
 * must be treated as compromised. Rotate these keys and obtain credentials
 * from a secure source (e.g. Cognito identity pool / STS) instead.
 *
 * @returns {object} the shared S3 client (also stored in module-level `bucket`).
 */
async function init() {
  const credentials = {
    accessKeyId: "AKIATLPEDU37QV5CHLMH",
    secretAccessKey: "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",
    region: "cn-northwest-1",
  };
  window.AWS.config.update(credentials);

  bucket = new window.AWS.S3({
    params: {
      Bucket: bucketname,
    },
  });
  return bucket;
}
/**
 * Look up an in-progress multipart upload for `key` so it can be resumed.
 *
 * @param {string} key - object key (used as the listing prefix).
 * @returns {Promise<{uploadid: (string|undefined), partsinfo: (object|undefined)}>}
 *   the most recent matching UploadId and its ListParts response; both
 *   undefined when no resumable upload exists or the lookup fails.
 */
async function getawscheckpoint(key) {
  // BUG FIX: `uploadid` was assigned without any declaration — an implicit
  // global in sloppy mode and a ReferenceError (on the no-uploads path) in
  // strict mode / ES modules.
  let uploadid;
  let partsinfo;
  try {
    const result = await bucket
      .listMultipartUploads({ Bucket: bucketname, Prefix: key })
      .promise();

    if (result.Uploads.length) {
      // Resume the most recently started upload for this key.
      uploadid = result.Uploads[result.Uploads.length - 1].UploadId;
      partsinfo = await bucket
        .listParts({ Bucket: bucketname, Key: key, UploadId: uploadid })
        .promise();
    }
  } catch (err) {
    console.log(err);
  }
  return { uploadid, partsinfo };
}
/**
 * Upload the missing parts of a (possibly resumed) multipart upload, then
 * complete it, reporting progress through `filestate`.
 *
 * @param {{status: string, percent: number}} filestate - mutated with progress.
 * @param {File} file - source file.
 * @param {string} uploadid - UploadId of the upload.
 * @param {Array<{PartNumber: number, ETag: string}>} parts - parts already on S3
 *   (empty array for a fresh upload).
 * @param {string} key - object key.
 * @returns {Promise<object|undefined>} CompleteMultipartUpload response, or
 *   undefined when a part upload fails (filestate.status is set to "fail").
 */
async function awsuploadpart(filestate, file, uploadid, parts, key) {
  // 1-based part numbers that are already stored on S3.
  var partarr = [];
  const completeparts = parts.map((p) => {
    partarr.push(p.PartNumber);
    return { PartNumber: p.PartNumber, ETag: p.ETag };
  });

  let uploadpart;
  let start = 0;
  let end = 0;
  let len = Math.ceil(file.size / partsize);
  if (partarr.length) {
    // Resuming: reflect the already-uploaded portion immediately.
    filestate.status = "processing";
    filestate.percent = parseInt((completeparts.length * 100) / len);
  }

  for (let i = 0; i < len; i++) {
    start = i * partsize;
    end = (i + 1) * partsize;
    // BUG FIX: part numbers are 1-based (we upload part i + 1), but the skip
    // check used `partarr.includes(i)` — re-uploading the first part and
    // skipping a part that had never been uploaded, so the final
    // CompleteMultipartUpload was missing a part.
    if (!partarr.includes(i + 1)) {
      uploadpart = await uploadPart(file, key, uploadid, i + 1, start, end);
      if (uploadpart.ETag != null) {
        completeparts.push(uploadpart);
        filestate.percent = parseInt((completeparts.length * 100) / len);
      }
      else {
        filestate.status = "fail";
        return;
      }
    }
  }

  var data = await completeMultipartUpload(completeparts, key, uploadid);
  filestate.status = "success";
  return data;
}
/**
 * Upload `file` to S3 with resume support, tracking progress on `filestate`.
 * If the object already exists, no upload is performed; if a previous upload
 * was interrupted, only the missing parts are sent.
 *
 * @param {File} file - file to upload.
 * @param {string} [folderid] - key prefix; a new GUID is generated when omitted.
 * @param {{status: string, percent: number}} filestate - mutated with progress.
 * @returns {Promise<{data: object, key: string, uploadid?: string}|undefined>}
 */
async function awsupload(file, folderid, filestate) {
  await init();
  const key = (folderid || window.Guid.newGuid()) + "/" + file.name;
  filestate.percent = 0;
  filestate.status = "start";

  const params = {
    Bucket: bucketname,
    Key: key,
  };

  try {
    // BUG FIX: the original passed an async callback to headObject(), so
    // awsupload resolved to undefined immediately, the callback's `return`
    // values were discarded, and the surrounding try/catch could never see
    // errors thrown inside it. Await the promise form instead.
    let head = null;
    try {
      head = await bucket.headObject(params).promise();
    } catch (headErr) {
      // Object not found (or not readable) — fall through to upload.
      head = null;
    }

    if (head) {
      // Object already exists: report instant completion.
      filestate.percent = 100;
      filestate.status = "success";
      return { data: head, key };
    }

    // Resume an interrupted upload when a checkpoint exists.
    const { uploadid, partsinfo } = await getawscheckpoint(key);
    if (uploadid) {
      const data = await awsuploadpart(filestate, file, uploadid, partsinfo.Parts, key);
      return { data, key, uploadid };
    }

    // Fresh upload.
    const newuploadid = await initMultipartUpload(key, file);
    const data = await awsuploadpart(filestate, file, newuploadid, [], key);
    return { data, key, uploadid: newuploadid };
  }
  catch (err) {
    filestate.status = "error";
    console.log(err);
  }
}