mirror of https://git.v0l.io/Kieran/void.cat.git (synced 2025-11-15 14:57:42 +01:00)
Use CryptoJS to hash files >2GB
@@ -34,12 +34,28 @@ public class InfoController : Controller
         var bw = await _statsReporter.GetBandwidth();
         var storeStats = await _fileMetadata.Stats();

-        return new(bw, storeStats.Size, storeStats.Files, BuildInfo.GetBuildInfo(),
-            _settings.CaptchaSettings?.SiteKey,
-            await _timeSeriesStats.GetBandwidth(DateTime.UtcNow.AddDays(-30), DateTime.UtcNow),
-            _fileStores);
+        return new()
+        {
+            Bandwidth = bw,
+            TotalBytes = storeStats.Size,
+            Count = storeStats.Files,
+            BuildInfo = BuildInfo.GetBuildInfo(),
+            CaptchaSiteKey = _settings.CaptchaSettings?.SiteKey,
+            TimeSeriesMetrics = await _timeSeriesStats.GetBandwidth(DateTime.UtcNow.AddDays(-30), DateTime.UtcNow),
+            FileStores = _fileStores,
+            UploadSegmentSize = _settings.UploadSegmentSize
+        };
     }

-    public sealed record GlobalInfo(Bandwidth Bandwidth, ulong TotalBytes, long Count, BuildInfo BuildInfo,
-        string? CaptchaSiteKey, IEnumerable<BandwidthPoint> TimeSeriesMetrics, IEnumerable<string?> FileStores);
-}
+    public sealed class GlobalInfo
+    {
+        public Bandwidth Bandwidth { get; init; }
+        public ulong TotalBytes { get; init; }
+        public long Count { get; init; }
+        public BuildInfo BuildInfo { get; init; }
+        public string? CaptchaSiteKey { get; init; }
+        public IEnumerable<BandwidthPoint> TimeSeriesMetrics { get; init; }
+        public IEnumerable<string?> FileStores { get; init; }
+        public ulong? UploadSegmentSize { get; init; }
+    }
+}
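With GlobalInfo switched from a positional record to an init-only class, the /info response now also carries uploadSegmentSize, which the React client reads further down. A rough sketch of the payload shape as the client sees it, assuming ASP.NET Core's default camelCase JSON naming; nested shapes and all values here are illustrative, not real data:

// Hypothetical /info response body (illustrative only):
const info = {
    bandwidth: { /* Bandwidth */ },
    totalBytes: 1024,
    count: 42,
    buildInfo: { /* BuildInfo */ },
    captchaSiteKey: null,
    timeSeriesMetrics: [ /* BandwidthPoint[] */ ],
    fileStores: ["local-disk"],
    uploadSegmentSize: 50_000_000   // null when the server sets no segment size
};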
@@ -11,7 +11,12 @@ namespace VoidCat.Model
     /// Data directory to store files in
     /// </summary>
     public string DataDirectory { get; init; } = "./data";

+    /// <summary>
+    /// Size in bytes to split uploads into chunks
+    /// </summary>
+    public ulong? UploadSegmentSize { get; init; } = null;
+
     /// <summary>
     /// Tor configuration
     /// </summary>
@@ -30,11 +30,11 @@ public static class FileStorageStartup
             }
         }

-        if (!string.IsNullOrEmpty(settings.Postgres))
+        if (settings.HasPostgres())
         {
             services.AddTransient<IUserUploadsStore, PostgresUserUploadStore>();
             services.AddTransient<IFileStore, LocalDiskFileStore>();
-            if (settings.MetadataStore == "postgres")
+            if (settings.MetadataStore is "postgres" or "local-disk")
             {
                 services.AddSingleton<IFileMetadataStore, PostgresFileMetadataStore>();
             }
@@ -6,6 +6,7 @@
   "dependencies": {
     "@hcaptcha/react-hcaptcha": "^1.1.1",
     "@reduxjs/toolkit": "^1.7.2",
+    "crypto-js": "^4.1.1",
     "feather-icons-react": "^0.5.0",
     "moment": "^2.29.4",
     "preval.macro": "^5.0.0",
@@ -1,6 +1,7 @@
 import {useEffect, useState} from "react";
-import {buf2hex, ConstName, FormatBytes} from "./Util";
+import {ConstName, FormatBytes} from "./Util";
 import {RateCalculator} from "./RateCalculator";
+import * as CryptoJS from 'crypto-js';

 import "./FileUpload.css";
 import {useSelector} from "react-redux";
@@ -20,6 +21,7 @@ export const DigestAlgo = "SHA-256";

 export function FileUpload(props) {
     const auth = useSelector(state => state.login.jwt);
+    const info = useSelector(state => state.info.info);
     const [speed, setSpeed] = useState(0);
     const [progress, setProgress] = useState(0);
     const [result, setResult] = useState();
@@ -84,14 +86,14 @@ export function FileUpload(props) {
     /**
      * Upload a segment of the file
      * @param segment {ArrayBuffer}
-     * @param id {string}
+     * @param fullDigest {string} Full file hash
+     * @param id {string?}
      * @param editSecret {string?}
-     * @param fullDigest {string?} Full file hash
      * @param part {int?} Segment number
      * @param partOf {int?} Total number of segments
      * @returns {Promise<any>}
      */
-    async function xhrSegment(segment, id, editSecret, fullDigest, part, partOf) {
+    async function xhrSegment(segment, fullDigest, id, editSecret, part, partOf) {
         setUState(UploadState.Uploading);

         return await new Promise((resolve, reject) => {
@@ -133,23 +135,36 @@ export function FileUpload(props) {
     }

     async function doXHRUpload() {
-        // upload file in segments of 50MB
-        const UploadSize = 50_000_000;
+        let uploadSize = info.uploadSegmentSize ?? Number.MAX_VALUE;

         setUState(UploadState.Hashing);
-        let digest = await crypto.subtle.digest(DigestAlgo, await props.file.arrayBuffer());
+        let hash = await digest(props.file);
+        if(props.file.size >= uploadSize) {
+            await doSplitXHRUpload(hash, uploadSize);
+        } else {
+            let xhr = await xhrSegment(props.file, hash);
+            handleXHRResult(xhr);
+        }
+    }
+
+    async function doSplitXHRUpload(hash, splitSize) {
         let xhr = null;
-        const segments = Math.ceil(props.file.size / UploadSize);
+        setProgress(0);
+        const segments = Math.ceil(props.file.size / splitSize);
         for (let s = 0; s < segments; s++) {
             calc.ResetLastLoaded();
-            let offset = s * UploadSize;
-            let slice = props.file.slice(offset, offset + UploadSize, props.file.type);
+            let offset = s * splitSize;
+            let slice = props.file.slice(offset, offset + splitSize, props.file.type);
             let segment = await slice.arrayBuffer();
-            xhr = await xhrSegment(segment, xhr?.file?.id, xhr?.file?.metadata?.editSecret, buf2hex(digest), s + 1, segments);
+            xhr = await xhrSegment(segment, xhr?.file?.id, xhr?.file?.metadata?.editSecret, hash, s + 1, segments);
             if (!xhr.ok) {
                 break;
             }
         }
+        handleXHRResult(xhr);
+    }
+
+    function handleXHRResult(xhr) {
         if (xhr.ok) {
             setUState(UploadState.Done);
             setResult(xhr.file);
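For reference, the segmenting arithmetic in doSplitXHRUpload works out as follows; the numbers below are hypothetical and only illustrate the mapping, they are not taken from the commit:

// Hypothetical numbers to show how a file maps onto upload segments:
const fileSize = 5_000_000_000;            // 5 GB file
const uploadSegmentSize = 50_000_000;      // value advertised by /info
const segments = Math.ceil(fileSize / uploadSegmentSize);  // 100 requests
// Segment s (0-based) covers bytes [s * uploadSegmentSize, (s + 1) * uploadSegmentSize);
// the final slice is simply shorter, since File.slice() clamps past end-of-file.
// Files smaller than uploadSegmentSize skip doSplitXHRUpload and go up in one request.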
@@ -159,6 +174,19 @@ export function FileUpload(props) {
             setResult(xhr.errorMessage);
         }
     }

+    async function digest(file) {
+        const chunkSize = 10_000_000;
+        let sha = CryptoJS.algo.SHA256.create();
+        for (let x = 0; x < Math.ceil(file.size / chunkSize); x++) {
+            let offset = x * chunkSize;
+            let slice = file.slice(offset, offset + chunkSize, file.type);
+            let data = Uint32Array.from(await slice.arrayBuffer());
+            sha.update(new CryptoJS.lib.WordArray.init(data, slice.length));
+            setProgress(offset / parseFloat(file.size));
+        }
+        return sha.finalize().toString();
+    }
+
     function renderStatus() {
         if (result) {
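The motivation for the new digest() helper: the old call, crypto.subtle.digest(DigestAlgo, await props.file.arrayBuffer()), needs the entire file materialised as a single ArrayBuffer, which most browsers cap at roughly 2 GB (the limit the commit title refers to), whereas CryptoJS can be fed the file incrementally. A minimal sketch of that incremental approach, assuming the full crypto-js 4.x bundle (whose WordArray.create accepts typed arrays); the helper name, chunk size, and progress callback here are illustrative rather than the commit's exact code:

import CryptoJS from "crypto-js";

// Hash a File/Blob chunk by chunk so the whole file never has to sit in memory.
async function sha256OfBlob(file, chunkSize = 10_000_000, onProgress = () => {}) {
    const sha = CryptoJS.algo.SHA256.create();
    for (let offset = 0; offset < file.size; offset += chunkSize) {
        const buffer = await file.slice(offset, offset + chunkSize).arrayBuffer();
        // The default crypto-js bundle ships lib-typedarrays, so WordArray.create
        // can wrap a Uint8Array view of the chunk directly.
        sha.update(CryptoJS.lib.WordArray.create(new Uint8Array(buffer)));
        onProgress(Math.min(1, (offset + chunkSize) / file.size));
    }
    return sha.finalize().toString(); // hex-encoded SHA-256
}

Because finalize().toString() already yields a hex string, the buf2hex helper dropped from the Util import above is no longer needed.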
@@ -7,7 +7,7 @@ import moment from "moment";
 import {useSelector} from "react-redux";

 export function GlobalStats() {
-    let stats = useSelector(state => state.info.stats);
+    let stats = useSelector(state => state.info.info);

     return (
         <Fragment>
@@ -5,7 +5,7 @@ import {InlineProfile} from "./InlineProfile";
 import {useApi} from "./Api";
 import {logout, setProfile} from "./LoginState";
 import {useEffect} from "react";
-import {setStats} from "./SiteInfoStore";
+import {setInfo} from "./SiteInfoStore";

 export function Header() {
     const dispatch = useDispatch();
@@ -26,7 +26,7 @@ export function Header() {
     async function loadStats() {
         let req = await Api.info();
         if (req.ok) {
-            dispatch(setStats(await req.json()));
+            dispatch(setInfo(await req.json()));
         }
     }

@@ -3,14 +3,14 @@
 export const SiteInfoState = createSlice({
     name: "SiteInfo",
     initialState: {
-        stats: null
+        info: null
     },
     reducers: {
-        setStats: (state, action) => {
-            state.stats = action.payload;
+        setInfo: (state, action) => {
+            state.info = action.payload;
         },
     }
 });

-export const {setStats} = SiteInfoState.actions;
+export const {setInfo} = SiteInfoState.actions;
 export default SiteInfoState.reducer;
@@ -3022,6 +3022,11 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3:
     shebang-command "^2.0.0"
     which "^2.0.1"

+crypto-js@^4.1.1:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.1.1.tgz#9e485bcf03521041bd85844786b83fb7619736cf"
+  integrity sha512-o2JlM7ydqd3Qk9CA0L4NL6mTzU2sdx96a+oOfPu8Mkl/PK51vSyoi8/rQ8NknZtk44vq15lmhAj9CIAGwgeWKw==
+
 crypto-random-string@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
@@ -11,8 +11,12 @@ services:
       - redis
   redis:
     image: "redis:alpine"
+    ports:
+      - "6379:6379"
   postgres:
     image: "postgres:14.1"
+    ports:
+      - "5432:5432"
     environment:
       - "POSTGRES_DB=void"
      - "POSTGRES_HOST_AUTH_METHOD=trust"