feat: file blob encryption

commit 1c9dd8d9ad (parent 1c38544ef7)

package.json
@@ -35,7 +35,7 @@
     "artplayer": "5.2.2",
     "artplayer-plugin-chapter": "^1.0.0",
     "artplayer-plugin-hls-control": "^1.0.1",
-    "axios": "^1.6.2",
+    "axios": "^1.12.2",
     "dayjs": "^1.11.10",
     "fuse.js": "^7.0.0",
     "heic-to": "^1.1.14",

src/api/api.ts
@@ -1,4 +1,5 @@
 import { AxiosProgressEvent, CancelToken } from "axios";
+import { EncryptedBlob } from "../component/Uploader/core/uploader/encrypt/blob.ts";
 import i18n from "../i18n.ts";
 import {
     AdminListGroupResponse,
@@ -722,16 +723,20 @@ export function sendUploadChunk(
     onProgress?: (progressEvent: AxiosProgressEvent) => void,
 ): ThunkResponse<UploadCredential> {
     return async (dispatch, _getState) => {
+        const streaming = chunk instanceof EncryptedBlob;
+
         return await dispatch(
             send(
                 `/file/upload/${sessionID}/${index}`,
                 {
-                    data: chunk,
+                    adapter: streaming ? "fetch" : "xhr",
+                    data: streaming ? chunk.stream() : chunk,
                     cancelToken: cancel,
                     onUploadProgress: onProgress,
                     method: "POST",
                     headers: {
                         "Content-Type": "application/octet-stream",
+                        ...(streaming && { "X-Expected-Entity-Length": chunk.size?.toString() ?? "0" }),
                     },
                 },
                 {

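Why the adapter switch: XMLHttpRequest cannot take a ReadableStream as a request body, so streaming an EncryptedBlob requires axios's "fetch" adapter (available in the axios version this commit upgrades to), and because a stream body carries no Content-Length, the expected size travels in the X-Expected-Entity-Length header instead. A minimal sketch of the same pattern with bare fetch, assuming placeholder url/body/size values; note that fetch() requires duplex: "half" for stream bodies:

// Sketch only: streaming a request body with fetch, roughly what the
// "fetch" adapter does under the hood. url, body, and size are placeholders.
async function uploadStream(url: string, body: ReadableStream<Uint8Array>, size: number): Promise<Response> {
    return fetch(url, {
        method: "POST",
        headers: {
            "Content-Type": "application/octet-stream",
            // No Content-Length is available for a stream; the server reads
            // the expected size from this header instead (as in the diff above).
            "X-Expected-Entity-Length": size.toString(),
        },
        body,
        // Required by fetch() when the request body is a ReadableStream.
        duplex: "half",
    } as RequestInit & { duplex: "half" });
}
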
src/api/explorer.ts
@@ -116,6 +116,8 @@ export interface StoragePolicy {
     type: PolicyType;
     relay?: boolean;
     chunk_concurrency?: number;
+    encryption?: boolean;
+    streaming_encryption?: boolean;
 }

 export interface PaginationResults {
@@ -490,6 +492,10 @@ export interface CreateViewerSessionService {
     version?: string;
 }

+export enum EncryptionAlgorithm {
+    aes256ctr = "aes-256-ctr",
+}
+
 export interface UploadSessionRequest {
     uri: string;
     size: number;
@@ -500,6 +506,13 @@ export interface UploadSessionRequest {
         [key: string]: string;
     };
     mime_type?: string;
+    encryption_supported?: EncryptionAlgorithm[];
+}
+
+export interface EncryptMetadata {
+    algorithm: EncryptionAlgorithm;
+    key_plain_text: string;
+    iv: string;
 }

 export interface UploadCredential {
@@ -519,6 +532,7 @@ export interface UploadCredential {
     callback_secret: string;
     mime_type?: string;
     upload_policy?: string;
+    encrypt_metadata?: EncryptMetadata;
 }

 export interface DeleteUploadSessionService {

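These types describe the negotiation: the client lists the algorithms it can run in encryption_supported, and the server may answer with an EncryptMetadata carrying the plaintext key and IV for the session. A hedged sketch of such metadata and of the key import, assuming base64 encoding (EncryptedBlob's default) and illustrative zero-byte values rather than real key material; the types are the ones defined in the diff above:

// Illustrative only: a 32-byte key and 16-byte IV, base64-encoded.
const metadata: EncryptMetadata = {
    algorithm: EncryptionAlgorithm.aes256ctr,
    key_plain_text: btoa(String.fromCharCode(...new Uint8Array(32))),
    iv: btoa(String.fromCharCode(...new Uint8Array(16))),
};

// AES-256-CTR import: raw 32-byte key, encrypt-only, mirroring what
// EncryptedBlob.importKey() in this commit does internally.
async function importCtrKey(meta: EncryptMetadata): Promise<CryptoKey> {
    const keyBytes = Uint8Array.from(atob(meta.key_plain_text), (c) => c.charCodeAt(0));
    return crypto.subtle.importKey("raw", keyBytes, { name: "AES-CTR" }, false, ["encrypt"]);
}
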
src/component/Uploader/core/api/index.ts
@@ -1,5 +1,14 @@
-import { OneDriveChunkResponse, QiniuChunkResponse, QiniuFinishUploadRequest, QiniuPartsInfo, S3Part } from "../types";
-import { OBJtoXML, request } from "../utils";
+import { CancelToken } from "axios";
+import {
+    sendCreateUploadSession,
+    sendDeleteUploadSession,
+    sendOneDriveCompleteUpload,
+    sendS3LikeCompleteUpload,
+    sendUploadChunk,
+} from "../../../../api/api.ts";
+import { UploadCredential, UploadSessionRequest } from "../../../../api/explorer.ts";
+import { AppError } from "../../../../api/request.ts";
+import { store } from "../../../../redux/store.ts";
 import {
     CreateUploadSessionError,
     DeleteUploadSessionError,
@@ -16,19 +25,11 @@ import {
     SlaveChunkUploadError,
     UpyunUploadError,
 } from "../errors";
-import { ChunkInfo, ChunkProgress } from "../uploader/chunk";
+import { OneDriveChunkResponse, QiniuChunkResponse, QiniuFinishUploadRequest, QiniuPartsInfo, S3Part } from "../types";
 import { Progress } from "../uploader/base";
-import { CancelToken } from "axios";
-import { UploadCredential, UploadSessionRequest } from "../../../../api/explorer.ts";
-import { store } from "../../../../redux/store.ts";
-import {
-    sendCreateUploadSession,
-    sendDeleteUploadSession,
-    sendOneDriveCompleteUpload,
-    sendS3LikeCompleteUpload,
-    sendUploadChunk,
-} from "../../../../api/api.ts";
-import { AppError } from "../../../../api/request.ts";
+import { ChunkInfo, ChunkProgress } from "../uploader/chunk";
+import { EncryptedBlob } from "../uploader/encrypt/blob.ts";
+import { OBJtoXML, request } from "../utils";

 export async function createUploadSession(req: UploadSessionRequest, _cancel: CancelToken): Promise<UploadCredential> {
     try {
@@ -85,13 +86,16 @@ export async function slaveUploadChunk(
     onProgress: (p: Progress) => void,
     cancel: CancelToken,
 ): Promise<any> {
+    const streaming = chunk.chunk instanceof EncryptedBlob;
     const res = await request<any>(`${url}?chunk=${chunk.index}`, {
         method: "post",
+        adapter: streaming ? "fetch" : "xhr",
         headers: {
             "content-type": "application/octet-stream",
             Authorization: credential,
+            ...(streaming && { "X-Expected-Entity-Length": chunk.chunk.size?.toString() ?? "0" }),
         },
-        data: chunk.chunk,
+        data: streaming ? chunk.chunk.stream() : chunk.chunk,
         onUploadProgress: (progressEvent) => {
             onProgress({
                 loaded: progressEvent.loaded,
@@ -115,13 +119,16 @@ export async function oneDriveUploadChunk(
     onProgress: (p: Progress) => void,
     cancel: CancelToken,
 ): Promise<OneDriveChunkResponse> {
+    const streaming = chunk.chunk instanceof EncryptedBlob;
     const res = await request<OneDriveChunkResponse>(url, {
         method: range === "" ? "get" : "put",
+        adapter: streaming ? "fetch" : "xhr",
         headers: {
             "content-type": "application/octet-stream",
+            ...(streaming && { "Content-Length": chunk.chunk.size?.toString() ?? "0" }),
             ...(range !== "" && { "content-range": range }),
         },
-        data: chunk.chunk,
+        data: streaming ? chunk.chunk.stream() : chunk.chunk,
         onUploadProgress: (progressEvent) => {
             onProgress({
                 loaded: progressEvent.loaded,
@@ -158,12 +165,14 @@ export async function s3LikeUploadChunk(
     onProgress: (p: Progress) => void,
     cancel: CancelToken,
 ): Promise<string> {
+    const streaming = chunk.chunk instanceof EncryptedBlob;
     const res = await request<string>(url, {
         method: "put",
+        adapter: streaming ? "fetch" : "xhr",
         headers: {
             "content-type": "application/octet-stream",
         },
-        data: chunk.chunk,
+        data: streaming ? chunk.chunk.stream() : chunk.chunk,
         onUploadProgress: (progressEvent) => {
             onProgress({
                 loaded: progressEvent.loaded,
@@ -258,13 +267,15 @@ export async function qiniuDriveUploadChunk(
     onProgress: (p: Progress) => void,
     cancel: CancelToken,
 ): Promise<QiniuChunkResponse> {
+    const streaming = chunk.chunk instanceof EncryptedBlob;
     const res = await request<QiniuChunkResponse>(`${url}/${chunk.index + 1}`, {
         method: "put",
+        adapter: streaming ? "fetch" : "xhr",
         headers: {
             "content-type": "application/octet-stream",
             authorization: "UpToken " + upToken,
         },
-        data: chunk.chunk,
+        data: streaming ? chunk.chunk.stream() : chunk.chunk,
         onUploadProgress: (progressEvent) => {
             onProgress({
                 loaded: progressEvent.loaded,
@@ -321,7 +332,7 @@ export async function qiniuFinishUpload(

 export async function upyunFormUploadChunk(
     url: string,
-    file: File,
+    file: Blob,
     policy: string,
     credential: string,
     onProgress: (p: Progress) => void,

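Each provider function above repeats the same three additions: detect an EncryptedBlob, switch to the fetch adapter, and send chunk.chunk.stream() instead of the Blob itself. A hedged sketch of that shared shape, had it been factored out; streamingOptions and StreamingRequestOptions are stand-in names for illustration, not part of the commit:

// Stand-in helper; the real code inlines this pattern per provider.
interface StreamingRequestOptions {
    adapter: "fetch" | "xhr";
    data: Blob | ReadableStream;
    sizeHeader: string; // value for X-Expected-Entity-Length / Content-Length
}

function streamingOptions(chunk: Blob): StreamingRequestOptions {
    const streaming = chunk instanceof EncryptedBlob;
    return {
        adapter: streaming ? "fetch" : "xhr",
        data: streaming ? chunk.stream() : chunk,
        sizeHeader: chunk.size?.toString() ?? "0",
    };
}
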
src/component/Uploader/core/types.ts
@@ -1,5 +1,5 @@
-import { ChunkProgress } from "./uploader/chunk";
 import { StoragePolicy, UploadCredential } from "../../../api/explorer.ts";
+import { ChunkProgress } from "./uploader/chunk";

 export enum TaskType {
     file,
@@ -13,6 +13,7 @@ export interface Task {
     policy: StoragePolicy;
     dst: string;
     file: File;
+    blob: Blob;
     child?: Task[];
     session?: UploadCredential;
     chunkProgress: ChunkProgress[];

src/component/Uploader/core/uploader/base.ts
@@ -1,6 +1,6 @@
 // Base class for all uploaders
 import axios, { CanceledError, CancelTokenSource } from "axios";
-import { PolicyType } from "../../../../api/explorer.ts";
+import { EncryptionAlgorithm, PolicyType } from "../../../../api/explorer.ts";
 import CrUri from "../../../../util/uri.ts";
 import { createUploadSession, deleteUploadSession } from "../api";
 import { UploaderError } from "../errors";
@@ -10,6 +10,7 @@ import { Task } from "../types";
 import * as utils from "../utils";
 import { CancelToken } from "../utils/request";
 import { validate } from "../utils/validator";
+import { EncryptedBlob } from "./encrypt/blob.ts";

 export enum Status {
     added,
@@ -146,6 +147,8 @@ export default abstract class Base {
                 last_modified: this.task.file.lastModified,
                 mime_type: this.task.file.type,
                 entity_type: this.task.overwrite ? "version" : undefined,
+                encryption_supported:
+                    this.task.policy.encryption && "crypto" in window ? [EncryptionAlgorithm.aes256ctr] : undefined,
             },
             this.cancelToken.token,
         );
@@ -157,6 +160,20 @@ export default abstract class Base {
             this.logger.info("Resume upload from cached ctx:", cachedInfo);
         }

+        if (this.task.session?.encrypt_metadata && !this.task.policy?.relay) {
+            // Check browser support for encryption
+            if (!("crypto" in window)) {
+                this.logger.error("Encryption is not supported in this browser");
+                this.setError(new Error("Web Crypto API is not supported in this browser"));
+                return;
+            }
+
+            const encryptedBlob = new EncryptedBlob(this.task.file, this.task.session?.encrypt_metadata);
+            this.task.blob = encryptedBlob;
+        } else {
+            this.task.blob = this.task.file;
+        }
+
         this.transit(Status.processing);
         await this.upload();
         await this.afterUpload();

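The flow added above, condensed: the client only advertises encryption_supported when the policy enables encryption and Web Crypto is available; if the server then answers with encrypt_metadata and the policy is not relayed, the File is wrapped in an EncryptedBlob, otherwise the raw File is used as-is. A minimal sketch of that decision, assuming the Task and EncryptedBlob types from this commit (the re-check of "crypto" in window mirrors the defensive check in the diff):

function prepareBlob(task: Task): Blob {
    const meta = task.session?.encrypt_metadata;
    if (meta && !task.policy?.relay) {
        if (!("crypto" in window)) {
            throw new Error("Web Crypto API is not supported in this browser");
        }
        // Everything downstream (chunking, uploading) reads task.blob, not task.file.
        return new EncryptedBlob(task.file, meta);
    }
    return task.file;
}
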
src/component/Uploader/core/uploader/chunk.ts
@@ -1,5 +1,6 @@
 import * as utils from "../utils";
 import Base from "./base";
+import { EncryptedBlob } from "./encrypt/blob";

 export interface ChunkProgress {
     loaded: number;
@@ -118,6 +119,9 @@ export default abstract class Chunk extends Base {
     }

     try {
+        if (chunkInfo.chunk instanceof EncryptedBlob && !this.task.policy.streaming_encryption) {
+            chunkInfo.chunk = new Blob([await chunkInfo.chunk.bytes()]);
+        }
         await this.uploadChunk(chunkInfo);
         this.logger.info(`Chunk [${chunkInfo.index}] uploaded successfully.`);
         onComplete(); // Call callback immediately after successful upload
@@ -158,7 +162,7 @@ export default abstract class Chunk extends Base {
     }

     private initBeforeUploadChunks() {
-        this.chunks = utils.getChunks(this.task.file, this.task.session?.chunk_size);
+        this.chunks = utils.getChunks(this.task.blob, this.task.session?.chunk_size);
         const cachedInfo = utils.getResumeCtx(this.task, this.logger);
         if (cachedInfo == null) {
             this.task.chunkProgress = this.chunks.map(

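The guard added to the try block is the buffered fallback: when the policy lacks streaming_encryption, the encrypted stream is drained into memory and re-wrapped as a plain Blob, so the chunk uploads over XHR with an exact Content-Length and ordinary progress events, at the cost of holding one full chunk in memory. A sketch of that step in isolation, assuming the EncryptedBlob from this commit:

// bytes() runs the whole chunk through the AES-CTR transform and buffers it.
async function materializeIfNeeded(chunk: Blob, streamingSupported: boolean): Promise<Blob> {
    if (chunk instanceof EncryptedBlob && !streamingSupported) {
        return new Blob([await chunk.bytes()]);
    }
    return chunk;
}
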
src/component/Uploader/core/uploader/encrypt/blob.ts (new file)
@@ -0,0 +1,303 @@
+import { EncryptMetadata, EncryptionAlgorithm } from "../../../../../api/explorer";
+
+/**
+ * EncryptedBlob wraps a Blob and encrypts its stream on-the-fly using the provided encryption metadata.
+ * This allows for client-side encryption during upload without loading the entire file into memory.
+ *
+ * ## Counter Handling for AES-CTR Mode
+ *
+ * AES-CTR (Counter) mode encryption requires careful counter management:
+ * - Each 16-byte block uses a unique counter value
+ * - Counter increments by 1 for each block
+ * - For byte position N: counter = initial_counter + floor(N / 16)
+ *
+ * ## Slicing Support
+ *
+ * When slice() is called, the new EncryptedBlob tracks the byte offset (counterOffset).
+ * This ensures that:
+ * 1. Block-aligned slices (offset % 16 == 0) encrypt correctly
+ * 2. Non-block-aligned slices handle partial blocks by padding and extracting
+ *
+ * Example:
+ * ```
+ * const encrypted = new EncryptedBlob(file, metadata);
+ * const chunk1 = encrypted.slice(0, 5MB); // Encrypts bytes [0, 5MB) with counter starting at base
+ * const chunk2 = encrypted.slice(5MB, 10MB); // Encrypts bytes [5MB, 10MB) with counter offset by 5MB/16 blocks
+ * ```
+ *
+ * The encrypted output of sliced chunks will match what would be produced if the entire
+ * blob was encrypted as one stream, maintaining consistency for chunked uploads.
+ */
+export class EncryptedBlob implements Blob {
+    private readonly blob: Blob;
+    private readonly metadata: EncryptMetadata;
+    private readonly counterOffset: number;
+    private cryptoKey?: CryptoKey;
+
+    constructor(blob: Blob, metadata: EncryptMetadata, counterOffset: number = 0) {
+        this.blob = blob;
+        this.metadata = metadata;
+        this.counterOffset = counterOffset;
+    }
+
+    /**
+     * Returns the size of the original blob.
+     * Note: Encrypted size may differ depending on algorithm, but for AES-CTR it remains the same.
+     */
+    get size(): number {
+        return this.blob.size;
+    }
+
+    get type(): string {
+        return this.blob.type;
+    }
+
+    /**
+     * Converts hex string or base64 string to Uint8Array
+     */
+    private stringToUint8Array(str: string, encoding: "hex" | "base64" = "base64"): Uint8Array<ArrayBuffer> {
+        if (encoding === "hex") {
+            // Remove any whitespace or separators
+            const cleaned = str.replace(/[^0-9a-fA-F]/g, "");
+            const buffer = new ArrayBuffer(cleaned.length / 2);
+            const bytes = new Uint8Array(buffer);
+            for (let i = 0; i < cleaned.length; i += 2) {
+                bytes[i / 2] = parseInt(cleaned.substring(i, i + 2), 16);
+            }
+            return bytes;
+        } else {
+            // base64 decoding
+            const binaryString = atob(str);
+            const buffer = new ArrayBuffer(binaryString.length);
+            const bytes = new Uint8Array(buffer);
+            for (let i = 0; i < binaryString.length; i++) {
+                bytes[i] = binaryString.charCodeAt(i);
+            }
+            return bytes;
+        }
+    }
+
+    /**
+     * Increment a counter (Uint8Array) by a given number of blocks
+     */
+    private incrementCounter(counter: Uint8Array<ArrayBuffer>, blocks: number): Uint8Array<ArrayBuffer> {
+        // Create a copy to avoid modifying the original counter
+        const result = new Uint8Array(counter);
+
+        // AES-CTR uses big-endian counter increment
+        // Start from the least significant byte (rightmost) and work backwards
+        let carry = blocks;
+        for (let i = result.length - 1; i >= 0 && carry > 0; i--) {
+            const sum = result[i] + carry;
+            result[i] = sum & 0xff;
+            carry = sum >>> 8; // Shift right by 8 bits to propagate overflow as carry
+        }
+
+        return result;
+    }
+
+    /**
+     * Import the encryption key for use with Web Crypto API
+     */
+    private async importKey(): Promise<CryptoKey> {
+        if (this.cryptoKey) {
+            return this.cryptoKey;
+        }
+
+        const keyBytes = this.stringToUint8Array(this.metadata.key_plain_text);
+
+        switch (this.metadata.algorithm) {
+            case EncryptionAlgorithm.aes256ctr:
+                this.cryptoKey = await crypto.subtle.importKey("raw", keyBytes, { name: "AES-CTR" }, false, ["encrypt"]);
+                break;
+            default:
+                throw new Error(`Unsupported encryption algorithm: ${this.metadata.algorithm}`);
+        }
+
+        return this.cryptoKey;
+    }
+
+    /**
+     * Create an encryption transform stream
+     */
+    private async createEncryptStream(): Promise<TransformStream<Uint8Array, Uint8Array>> {
+        const cryptoKey = await this.importKey();
+        const iv = this.stringToUint8Array(this.metadata.iv);
+
+        // Create counter value (16 bytes IV) and apply offset for sliced blobs
+        let counter = new Uint8Array(16);
+        counter.set(iv.slice(0, 16));
+
+        // Apply counter offset based on byte position (each block is 16 bytes)
+        // For non-block-aligned offsets, we handle partial blocks correctly
+        if (this.counterOffset > 0) {
+            const blockOffset = Math.floor(this.counterOffset / 16);
+            counter = this.incrementCounter(counter, blockOffset);
+        }
+
+        const that = this;
+        // Track bytes processed to handle partial blocks correctly
+        let totalBytesProcessed = this.counterOffset;
+        // Remember if we've processed the first chunk (which may be non-block-aligned)
+        let isFirstChunk = true;
+
+        return new TransformStream<Uint8Array, Uint8Array>({
+            async transform(chunk, controller) {
+                // Create a new ArrayBuffer copy to ensure proper type for crypto API
+                const buffer = new ArrayBuffer(chunk.byteLength);
+                const chunkData = new Uint8Array(buffer);
+                if (chunk instanceof Uint8Array) {
+                    chunkData.set(new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength));
+                } else {
+                    chunkData.set(new Uint8Array(chunk));
+                }
+
+                // Handle partial block at the start (only for the first chunk if offset is not block-aligned)
+                const offsetInBlock = totalBytesProcessed % 16;
+                if (isFirstChunk && offsetInBlock !== 0) {
+                    // We're starting mid-block. Need to encrypt the partial block correctly.
+                    // Pad to 16 bytes, encrypt, then extract only the bytes we need
+                    const partialBlockSize = Math.min(16 - offsetInBlock, chunkData.byteLength);
+                    const paddedBlock = new Uint8Array(16);
+                    paddedBlock.set(chunkData.slice(0, partialBlockSize), offsetInBlock);
+
+                    const encryptedBlock = await crypto.subtle.encrypt(
+                        { name: "AES-CTR", counter, length: 128 },
+                        cryptoKey,
+                        paddedBlock,
+                    );
+
+                    const encryptedBytes = new Uint8Array(encryptedBlock);
+                    controller.enqueue(encryptedBytes.slice(offsetInBlock, offsetInBlock + partialBlockSize));
+
+                    // Increment counter by 1 block
+                    counter = that.incrementCounter(counter, 1);
+                    totalBytesProcessed += partialBlockSize;
+
+                    // Process remaining bytes if any
+                    if (partialBlockSize < chunkData.byteLength) {
+                        const remaining = chunkData.slice(partialBlockSize);
+                        const encryptedRemaining = await crypto.subtle.encrypt(
+                            { name: "AES-CTR", counter, length: 128 },
+                            cryptoKey,
+                            remaining,
+                        );
+                        controller.enqueue(new Uint8Array(encryptedRemaining));
+
+                        // Increment counter by number of blocks processed
+                        const blocksProcessed = Math.ceil(remaining.byteLength / 16);
+                        counter = that.incrementCounter(counter, blocksProcessed);
+                        totalBytesProcessed += remaining.byteLength;
+                    }
+                    isFirstChunk = false;
+                } else {
+                    // Normal case: block-aligned encryption
+                    const encrypted = await crypto.subtle.encrypt(
+                        { name: "AES-CTR", counter, length: 128 },
+                        cryptoKey,
+                        chunkData,
+                    );
+
+                    // Send encrypted chunk
+                    controller.enqueue(new Uint8Array(encrypted));
+
+                    // Update counter: increment by number of 16-byte blocks (rounded up for partial blocks)
+                    const blocksProcessed = Math.ceil(chunkData.byteLength / 16);
+                    counter = that.incrementCounter(counter, blocksProcessed);
+                    totalBytesProcessed += chunkData.byteLength;
+                    isFirstChunk = false;
+                }
+            },
+        });
+    }
+
+    /**
+     * Returns an encrypted stream of the blob's contents
+     */
+    stream(): ReadableStream {
+        const originalStream = this.blob.stream();
+        const encryptStreamPromise = this.createEncryptStream();
+
+        // Create a passthrough stream that will pipe through the encrypt stream once it's ready
+        return new ReadableStream({
+            async start(controller) {
+                const encryptStream = await encryptStreamPromise;
+                const encryptedStream = originalStream.pipeThrough(encryptStream);
+                const reader = encryptedStream.getReader();
+
+                try {
+                    while (true) {
+                        const { done, value } = await reader.read();
+                        if (done) break;
+                        controller.enqueue(value);
+                    }
+                    controller.close();
+                } catch (error) {
+                    controller.error(error);
+                }
+            },
+        });
+    }
+
+    /**
+     * Returns encrypted blob slice
+     * The counter offset is calculated to ensure correct encryption alignment
+     * for the sliced portion of the blob.
+     */
+    slice(start?: number, end?: number, contentType?: string): Blob {
+        const slicedBlob = this.blob.slice(start, end, contentType);
+
+        // Calculate the new counter offset
+        // The offset accumulates: if this blob already has an offset, add to it
+        const newOffset = this.counterOffset + (start || 0);
+
+        return new EncryptedBlob(slicedBlob, this.metadata, newOffset);
+    }
+
+    /**
+     * Returns encrypted data as ArrayBuffer
+     */
+    async arrayBuffer(): Promise<ArrayBuffer> {
+        const stream = this.stream();
+        const reader = stream.getReader();
+        const chunks: Uint8Array[] = [];
+
+        try {
+            while (true) {
+                const { done, value } = await reader.read();
+                if (done) break;
+                chunks.push(value);
+            }
+        } finally {
+            reader.releaseLock();
+        }
+
+        // Concatenate all chunks
+        const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
+        const result = new Uint8Array(totalLength);
+        let offset = 0;
+        for (const chunk of chunks) {
+            result.set(chunk, offset);
+            offset += chunk.length;
+        }
+
+        return result.buffer;
+    }
+
+    /**
+     * Returns encrypted data as text (likely not useful, but required by Blob interface)
+     */
+    async text(): Promise<string> {
+        const buffer = await this.arrayBuffer();
+        const decoder = new TextDecoder();
+        return decoder.decode(buffer);
+    }
+
+    /**
+     * Returns encrypted data as Uint8Array (required by Blob interface)
+     */
+    async bytes(): Promise<Uint8Array<ArrayBuffer>> {
+        const buffer = await this.arrayBuffer();
+        return new Uint8Array(buffer);
+    }
+}

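The counter arithmetic is the heart of this class: AES-CTR block N uses counter IV + N, so a slice starting at byte offset S begins at block floor(S / 16), carried big-endian across the 16-byte counter. A standalone copy of the increment with a worked 5 MiB example (5 MiB / 16 = 327680 = 0x50000 blocks; the all-zero IV is for illustration only):

function incrementCounter(counter: Uint8Array, blocks: number): Uint8Array {
    const result = new Uint8Array(counter);
    let carry = blocks;
    // Big-endian add: start at the least significant (last) byte.
    for (let i = result.length - 1; i >= 0 && carry > 0; i--) {
        const sum = result[i] + carry;
        result[i] = sum & 0xff;
        carry = sum >>> 8;
    }
    return result;
}

// After a 5 MiB chunk, 0x50000 blocks are consumed, so the counter's last
// three bytes become 0x05 0x00 0x00.
const counter = incrementCounter(new Uint8Array(16), (5 * 1024 * 1024) / 16);
console.log(counter[13], counter[14], counter[15]); // 5 0 0
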
src/component/Uploader/core/uploader/upyun.ts
@@ -1,12 +1,16 @@
-import Base from "./base";
 import { upyunFormUploadChunk } from "../api";
+import Base from "./base";
+import { EncryptedBlob } from "./encrypt/blob";

 export default class Upyun extends Base {
     public upload = async () => {
         this.logger.info("Starting uploading file stream:", this.task.file);
+        if (this.task.blob instanceof EncryptedBlob && !this.task.policy.streaming_encryption) {
+            this.task.blob = new Blob([await this.task.blob.bytes()]);
+        }
         await upyunFormUploadChunk(
             this.task.session?.upload_urls[0]!,
-            this.task.file,
+            this.task.blob,
             this.task.session?.upload_policy!,
             this.task.session?.credential!,
             (p) => {

src/component/Uploader/core/utils/index.ts
@@ -5,7 +5,7 @@ import { Task } from "../types";
 import { ChunkProgress } from "../uploader/chunk";

 // File chunking
-export function getChunks(file: File, chunkByteSize: number | undefined): Blob[] {
+export function getChunks(file: Blob, chunkByteSize: number | undefined): Blob[] {
     // If chunkByteSize is larger than the file or is 0, use the file size directly
     if (!chunkByteSize || chunkByteSize > file.size || chunkByteSize == 0) {
         chunkByteSize = file.size;

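Widening getChunks from File to Blob is what lets the chunker stay encryption-agnostic: slicing an EncryptedBlob returns another EncryptedBlob whose counterOffset equals the slice start, so every chunk encrypts exactly as it would inside one continuous stream. An illustrative use, assuming the file and metadata placeholders from the earlier sketches:

const encrypted = new EncryptedBlob(file, metadata);
const chunkSize = 5 * 1024 * 1024;
const chunks: Blob[] = [];
for (let start = 0; start < encrypted.size; start += chunkSize) {
    // Each slice carries counterOffset = start, keeping its AES-CTR counter aligned.
    chunks.push(encrypted.slice(start, Math.min(start + chunkSize, encrypted.size)));
}
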
@@ -1177,7 +1177,7 @@ export function batchGetDirectLinks(index: number, files: FileResponse[]): AppThunk {
 export function resetThumbnails(files: FileResponse[]): AppThunk {
     return async (dispatch, getState) => {
         const thumbConfigLoaded = getState().siteConfig.thumb.loaded;
-        if (thumbConfigLoaded == ConfigLoadState.NotLoaded) {
+        if (thumbConfigLoaded != ConfigLoadState.Loaded) {
             await dispatch(loadSiteConfig("thumb"));
         }

yarn.lock
@@ -4592,13 +4592,13 @@ available-typed-arrays@^1.0.7:
   dependencies:
     possible-typed-array-names "^1.0.0"

-axios@^1.6.2:
-  version "1.6.2"
-  resolved "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz"
-  integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==
+axios@^1.12.2:
+  version "1.12.2"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-1.12.2.tgz#6c307390136cf7a2278d09cec63b136dfc6e6da7"
+  integrity sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==
   dependencies:
-    follow-redirects "^1.15.0"
-    form-data "^4.0.0"
+    follow-redirects "^1.15.6"
+    form-data "^4.0.4"
     proxy-from-env "^1.1.0"

 axios@^1.7.4:
@@ -6231,11 +6231,6 @@ flatted@^3.2.9:
   resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz"
   integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==

-follow-redirects@^1.15.0:
-  version "1.15.3"
-  resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz"
-  integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==
-
 follow-redirects@^1.15.6:
   version "1.15.9"
   resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1"
@@ -6257,6 +6252,17 @@ form-data@^4.0.0:
     combined-stream "^1.0.8"
     mime-types "^2.1.12"

+form-data@^4.0.4:
+  version "4.0.4"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.4.tgz#784cdcce0669a9d68e94d11ac4eea98088edd2c4"
+  integrity sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.8"
+    es-set-tostringtag "^2.1.0"
+    hasown "^2.0.2"
+    mime-types "^2.1.12"
+
 format@^0.2.0:
   version "0.2.2"
   resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b"