add accounts

This commit is contained in:
Julian Freeman
2025-07-03 19:18:36 -04:00
commit 85b97a626c
20 changed files with 965 additions and 0 deletions

101
.gitignore vendored Normal file
View File

@@ -0,0 +1,101 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
.pytest_cache/
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
.idea
.vscode
venv/
ENV/
env/
# SQLite databases
*.sqlite3
*.db
# Credentials
credentials.json
# Reflex
.web

38
README.md Normal file
View File

@@ -0,0 +1,38 @@
# MultiDrive Box
This project combines multiple Google Drive accounts into a single storage pool. It supports smart chunking of large files, distributed storage across multiple accounts, and unified download.
## Setup
1. **Install dependencies:**
```bash
pip install -r requirements.txt
```
2. **Google Drive API Credentials:**
- Go to the [Google Cloud Console](https://console.cloud.google.com/).
- Create a new project.
- Enable the Google Drive API.
- Create OAuth 2.0 Client ID credentials for a "Desktop app".
- Download the `credentials.json` file and place it in the root of this project.
3. **Run the backend:**
```bash
uvicorn app.main:app --reload
```
4. **Run the frontend:**
```bash
cd reflex_app
reflex run
```
## How it Works
- **FastAPI Backend:** Handles file uploads, chunking, Google Drive integration, and database management.
- **Reflex Frontend:** Provides a web interface for uploading files and viewing storage status.
- **SQLite:** Stores metadata about users, accounts, files, and file parts.

0
app/__init__.py Normal file
View File

58
app/crud.py Normal file
View File

@@ -0,0 +1,58 @@
from sqlalchemy.orm import Session
from . import models, schemas
def get_user(db: Session, user_id: int):
    """Return the User with the given primary key, or None if absent."""
    query = db.query(models.User).filter(models.User.id == user_id)
    return query.first()
def get_user_by_email(db: Session, email: str):
    """Return the User with the given (unique) email, or None if absent."""
    query = db.query(models.User).filter(models.User.email == email)
    return query.first()
def create_user(db: Session, user: schemas.UserCreate):
    """Persist a new User row and return it with its generated id."""
    new_user = models.User(email=user.email)
    db.add(new_user)
    db.commit()
    # Refresh so the autogenerated primary key is populated on the object.
    db.refresh(new_user)
    return new_user
def get_account(db: Session, account_id: int):
    """Return the Account with the given primary key, or None if absent."""
    query = db.query(models.Account).filter(models.Account.id == account_id)
    return query.first()
def get_accounts(db: Session, user_id: int):
    """Return every Account linked to the given user (possibly empty)."""
    query = db.query(models.Account).filter(models.Account.user_id == user_id)
    return query.all()
def create_account(db: Session, account: schemas.AccountCreate, user_id: int):
    """Persist a linked Google Drive account for *user_id* and return it.

    Uses Pydantic v2's ``model_dump()`` — the schemas in this project use
    the v2 ``from_attributes`` config, and ``.dict()`` is deprecated there.
    """
    db_account = models.Account(**account.model_dump(), user_id=user_id)
    db.add(db_account)
    db.commit()
    db.refresh(db_account)
    return db_account
def get_file_by_token(db: Session, token: str):
    """Return the File whose download_token matches, or None if absent."""
    query = db.query(models.File).filter(models.File.download_token == token)
    return query.first()
def create_file(db: Session, file: schemas.FileCreate, user_id: int, parts: list):
    """Store a File row plus one FilePart row per uploaded chunk.

    *parts* is a list of dicts carrying part_index, size, account_id,
    drive_file_id and sha256 for each chunk already uploaded to Drive.
    Returns the refreshed File with its parts relationship populated.
    """
    new_file = models.File(
        user_id=user_id,
        filename=file.filename,
        original_size=file.original_size,
        sha256=file.sha256,
        download_token=file.download_token,
    )
    db.add(new_file)
    db.commit()
    # Refresh first: the generated file id is needed for the part rows.
    db.refresh(new_file)
    db.add_all([
        models.FilePart(
            file_id=new_file.id,
            part_index=part['part_index'],
            size=part['size'],
            account_id=part['account_id'],
            drive_file_id=part['drive_file_id'],
            sha256=part['sha256'],
        )
        for part in parts
    ])
    db.commit()
    db.refresh(new_file)
    return new_file

13
app/database.py Normal file
View File

@@ -0,0 +1,13 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .models import Base
# SQLite file created in the working directory the server is started from.
DATABASE_URL = "sqlite:///./multidrive.db"
engine = create_engine(
    # check_same_thread=False: SQLite connections are used from FastAPI's
    # worker threads, which SQLite forbids by default.
    DATABASE_URL, connect_args={"check_same_thread": False}
)
# Session factory; request handlers obtain one session per request.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
def init_db():
    """Create every table declared on Base (no-op for tables that exist)."""
    Base.metadata.create_all(bind=engine)

200
app/main.py Normal file
View File

@@ -0,0 +1,200 @@
from fastapi import FastAPI, Depends, HTTPException, UploadFile, File as FastAPIFile, Request
from fastapi.responses import StreamingResponse, RedirectResponse
from sqlalchemy.orm import Session
from . import crud, models, schemas
from .database import SessionLocal, init_db
from .services import drive, file_handler
from starlette.middleware.sessions import SessionMiddleware
from fastapi.middleware.cors import CORSMiddleware
from dotenv import load_dotenv
import os
# Load SECRET_KEY (and any other settings) from a local .env file.
load_dotenv()
app = FastAPI(
    title="MultiDrive Box",
    description="Union de varias cuentas de Google Drive para formar un único pool de almacenamiento.",
    version="0.1.0"
)
# Add CORS middleware to allow cross-origin requests.
# This is crucial for the frontend (e.g., running on port 3000)
# to communicate with the backend (running on port 8000).
origins = [
    "http://localhost:3000",
    "http://127.0.0.1:3000",
]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,  # Allow cookies to be sent and received
    allow_methods=["*"],
    allow_headers=["*"],
)
# The session middleware stores the OAuth `state` between /api/add-account
# and /api/oauth2callback, so it needs a stable secret key.
SECRET_KEY = os.getenv("SECRET_KEY")
if not SECRET_KEY:
    raise ValueError("No SECRET_KEY set for session middleware.")
app.add_middleware(SessionMiddleware, secret_key=SECRET_KEY)
# Dependency
def get_db():
    """FastAPI dependency yielding one DB session per request.

    The session is closed when the request finishes, whether or not the
    handler raised.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
@app.on_event("startup")
def on_startup():
    """Create the database tables before serving the first request."""
    init_db()
@app.get("/api/storage-status", response_model=schemas.StorageStatus)
def get_storage_status(db: Session = Depends(get_db)):
    """Aggregate total/used/free bytes across every linked Drive account.

    Returns zeros when no account is linked. NULL space columns (the
    model allows them) are treated as 0 so a half-initialized account
    cannot crash the sum.
    """
    # Single-user mode for now: everything is attached to user id 1.
    user = crud.get_user(db, 1)
    if not user:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    accounts = crud.get_accounts(db, user_id=user.id)
    total_space = sum(acc.drive_space_total or 0 for acc in accounts)
    used_space = sum(acc.drive_space_used or 0 for acc in accounts)
    return {
        "total_space": total_space,
        "used_space": used_space,
        "free_space": total_space - used_space,
        "accounts": accounts
    }
@app.post("/api/upload-file")
async def upload_file(file: UploadFile = FastAPIFile(...), db: Session = Depends(get_db)):
    """Receive a file, split it across the linked Drive accounts, record
    the part metadata, and return a shareable download link.

    Raises 400 when no account is linked and 401 when an account's stored
    credentials can no longer produce a Drive client.
    """
    # Single-user mode for now: everything is attached to user id 1.
    user = crud.get_user(db, 1)
    if not user:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    accounts = crud.get_accounts(db, user_id=user.id)
    if not accounts:
        raise HTTPException(status_code=400, detail="No Google Drive accounts linked.")
    file_path, sha256 = await file_handler.save_temp_file(file)
    parts = file_handler.split_file(file_path, accounts)
    uploaded_parts = []
    for part in parts:
        drive_service = drive.get_drive_service(credentials_info=part['account'].credentials)
        if not drive_service:
            raise HTTPException(status_code=401, detail=f"Could not get drive service for account {part['account'].google_email}")
        # BUGFIX: upload_file is a module-level helper in services.drive
        # taking (service, path, folder_id=None); the googleapiclient
        # Resource object has no `upload_file` method, so the original
        # `drive_service.upload_file(...)` raised AttributeError.
        drive_file_id = drive.upload_file(drive_service, part['path'])
        uploaded_parts.append({
            "part_index": part['index'],
            "size": part['size'],
            "account_id": part['account'].id,
            "drive_file_id": drive_file_id,
            "sha256": part['sha256']
        })
    download_token = file_handler.generate_token()
    file_data = schemas.FileCreate(
        filename=file.filename,
        # UploadFile.size can be None (e.g. chunked transfer); fall back to
        # the size of the file actually written to disk.
        original_size=file.size if file.size is not None else os.path.getsize(file_path),
        sha256=sha256,
        download_token=download_token
    )
    crud.create_file(db, file=file_data, user_id=user.id, parts=uploaded_parts)
    return {"download_link": f"/api/file/{download_token}"}
@app.get("/api/file/{token}", response_model=schemas.File)
def get_file_metadata(token: str, db: Session = Depends(get_db)):
    """Return stored metadata for a shared file, or 404 if unknown."""
    record = crud.get_file_by_token(db, token)
    if record is None:
        raise HTTPException(status_code=404, detail="File not found")
    return record
from fastapi.responses import StreamingResponse
import shutil
import tempfile
@app.get("/api/file/{token}/download")
def download_file(token: str, db: Session = Depends(get_db)):
    """Re-assemble a shared file from its Drive parts and stream it.

    Parts are fetched into a temporary directory and merged in
    part_index order. BUGFIX: the original code deleted the temp
    directory *before* StreamingResponse read the merged file, so every
    download failed on a missing file; cleanup now happens in the
    iterator's ``finally`` after streaming completes.
    """
    db_file = crud.get_file_by_token(db, token)
    if not db_file:
        raise HTTPException(status_code=404, detail="File not found")
    temp_dir = tempfile.mkdtemp()
    part_paths = []
    for part in sorted(db_file.parts, key=lambda p: p.part_index):
        account = crud.get_account(db, part.account_id)
        drive_service = drive.get_drive_service(credentials_info=account.credentials)
        if not drive_service:
            # Don't leak the temp dir when bailing out early.
            shutil.rmtree(temp_dir, ignore_errors=True)
            raise HTTPException(status_code=401, detail=f"Could not get drive service for account {account.google_email}")
        part_path = os.path.join(temp_dir, f"{db_file.filename}.part{part.part_index}")
        drive.download_file(drive_service, part.drive_file_id, part_path)
        part_paths.append(part_path)
    merged_file_path = os.path.join(temp_dir, db_file.filename)
    file_handler.merge_files(part_paths, merged_file_path)
    def file_iterator(file_path, cleanup_dir):
        # Stream the merged file, then remove the temp directory.
        try:
            with open(file_path, 'rb') as f:
                yield from f
        finally:
            shutil.rmtree(cleanup_dir, ignore_errors=True)
    return StreamingResponse(file_iterator(merged_file_path, temp_dir), media_type="application/octet-stream", headers={"Content-Disposition": f"attachment; filename={db_file.filename}"})
@app.get("/api/add-account")
def add_account(request: Request):
    """Start the Google OAuth flow and redirect the browser to Google."""
    print("Received request for /api/add-account")
    try:
        callback_url = request.url_for('oauth2callback')
        print(f"Redirect URI for oauth2callback: {callback_url}")
        auth_url, oauth_state = drive.authenticate(callback_url)
        print(f"Generated authorization URL: {auth_url}")
        # Remember the CSRF state so the OAuth callback can verify it.
        request.session['state'] = oauth_state
        print(f"Stored state in session: {oauth_state}")
        # Send the user's browser straight to Google's authorization page.
        return RedirectResponse(auth_url)
    except Exception as exc:
        print(f"Error in /api/add-account: {exc}")
        raise HTTPException(status_code=500, detail="Internal server error")
@app.get("/api/oauth2callback")
def oauth2callback(request: Request, code: str, state: str, db: Session = Depends(get_db)):
    """OAuth callback: verify state, exchange the code, store the account.

    Queries Drive's about() endpoint for the account email and storage
    quota, persists the account under the single dummy user, then sends
    the browser back to the frontend root.
    """
    session_state = request.session.get('state')
    print(f"Callback received. State from Google: {state}, State from session: {session_state}")
    # CSRF protection: the state Google echoes back must match the value
    # stored in the session by /api/add-account.
    if not session_state or state != session_state:
        print("State mismatch error!")
        print(f"Session content: {request.session}")
        raise HTTPException(status_code=400, detail="State mismatch")
    redirect_uri = request.url_for('oauth2callback')
    credentials = drive.exchange_code_for_credentials(code, redirect_uri, state)
    drive_service = drive.get_drive_service(credentials_info=credentials.to_json())
    about = drive_service.about().get(fields="user, storageQuota").execute()
    user_info = about['user']
    storage_quota = about['storageQuota']
    # Single-user mode: every account is attached to user id 1.
    user = crud.get_user(db, 1)
    if not user:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    account_data = schemas.AccountCreate(
        google_email=user_info['emailAddress'],
        credentials=credentials.to_json(),
        # NOTE(review): 'limit' is absent for unlimited-storage plans, which
        # would record a total of 0 here — confirm whether that is intended.
        drive_space_total=int(storage_quota.get('limit', 0)),
        drive_space_used=int(storage_quota.get('usage', 0))
    )
    crud.create_account(db, account=account_data, user_id=user.id)
    return RedirectResponse(url="/")

50
app/models.py Normal file
View File

@@ -0,0 +1,50 @@
from sqlalchemy import create_engine, Column, Integer, String, DateTime, ForeignKey, BigInteger, JSON
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
import datetime
Base = declarative_base()
class User(Base):
    """Application user; owns the linked Drive accounts and uploaded files."""
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    email = Column(String, unique=True, index=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    accounts = relationship("Account", back_populates="user")
    files = relationship("File", back_populates="user")
class Account(Base):
    """A linked Google Drive account contributing space to the pool."""
    __tablename__ = "accounts"
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    google_email = Column(String, unique=True, index=True)
    # OAuth credentials as serialized by google-auth's Credentials.to_json().
    credentials = Column(JSON)
    drive_space_total = Column(BigInteger)  # bytes, as reported by Drive
    drive_space_used = Column(BigInteger)   # bytes, as reported by Drive
    drive_space_reserved = Column(BigInteger, default=1073741824)  # 1GB reserved
    user = relationship("User", back_populates="accounts")
    file_parts = relationship("FilePart", back_populates="account")
class File(Base):
    """An uploaded file; its bytes live as FilePart chunks across accounts."""
    __tablename__ = "files"
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    filename = Column(String)
    original_size = Column(BigInteger)
    # SHA-256 of the complete original file, for integrity checking.
    sha256 = Column(String)
    upload_time = Column(DateTime, default=datetime.datetime.utcnow)
    # Opaque token embedded in the public share/download URL.
    download_token = Column(String, unique=True, index=True)
    user = relationship("User", back_populates="files")
    parts = relationship("FilePart", back_populates="file")
class FilePart(Base):
    """One chunk of a File, stored on a single Drive account."""
    __tablename__ = "file_parts"
    id = Column(Integer, primary_key=True, index=True)
    file_id = Column(Integer, ForeignKey("files.id"))
    # 0-based position of this chunk within the original file.
    part_index = Column(Integer)
    size = Column(BigInteger)
    account_id = Column(Integer, ForeignKey("accounts.id"))
    # Google Drive file id of the uploaded chunk.
    drive_file_id = Column(String)
    # SHA-256 of this chunk alone.
    sha256 = Column(String)
    file = relationship("File", back_populates="parts")
    account = relationship("Account", back_populates="file_parts")

78
app/schemas.py Normal file
View File

@@ -0,0 +1,78 @@
from pydantic import BaseModel
from typing import List, Optional, Any
import datetime
class AccountBase(BaseModel):
    """Fields shared by all account schemas."""
    google_email: str
class AccountCreate(AccountBase):
    """Payload used when persisting a newly linked Drive account."""
    credentials: Any  # Can be a dict or a JSON string
    drive_space_total: int
    drive_space_used: int
class Account(AccountBase):
    """Account as returned by the API (read from ORM rows)."""
    id: int
    user_id: int
    credentials: Any
    drive_space_total: int
    drive_space_used: int
    drive_space_reserved: int
    class Config:
        from_attributes = True
class FilePartBase(BaseModel):
    """Fields shared by all file-part schemas."""
    part_index: int
    size: int
    drive_file_id: str
    sha256: str
class FilePartCreate(FilePartBase):
    """Payload used when recording an uploaded chunk."""
    pass
class FilePart(FilePartBase):
    """File part as returned by the API (read from ORM rows)."""
    id: int
    file_id: int
    account_id: int
    class Config:
        from_attributes = True
class FileBase(BaseModel):
    """Fields shared by all file schemas."""
    filename: str
    original_size: int
    sha256: str
class FileCreate(FileBase):
    """Payload used when recording a completed upload."""
    download_token: str
class File(FileBase):
    """File metadata as returned by the API, including its parts."""
    id: int
    user_id: int
    upload_time: datetime.datetime
    download_token: str
    parts: List[FilePart] = []
    class Config:
        from_attributes = True
class UserBase(BaseModel):
    """Fields shared by all user schemas."""
    email: str
class UserCreate(UserBase):
    """Payload used when creating a user."""
    pass
class User(UserBase):
    """User as returned by the API, with linked accounts and files."""
    id: int
    created_at: datetime.datetime
    accounts: List[Account] = []
    files: List[File] = []
    class Config:
        from_attributes = True
class StorageStatus(BaseModel):
    """Aggregated pool usage (bytes) plus the per-account breakdown."""
    total_space: int
    used_space: int
    free_space: int
    accounts: List[Account]

0
app/services/__init__.py Normal file
View File

72
app/services/drive.py Normal file
View File

@@ -0,0 +1,72 @@
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
import os
import json
# This file should be downloaded from Google Cloud Console
CLIENT_SECRETS_FILE = "credentials.json"
SCOPES = ["https://www.googleapis.com/auth/drive.file"]
def get_drive_service(credentials_info=None):
    """Build an authenticated Drive v3 client from stored credentials.

    Accepts either a dict or a JSON string (as produced by
    Credentials.to_json()). Returns None when credentials are missing or
    no longer valid; callers translate that into a 401.
    """
    if isinstance(credentials_info, str):
        credentials_info = json.loads(credentials_info)
    creds = None
    if credentials_info:
        creds = Credentials.from_authorized_user_info(credentials_info, SCOPES)
    if not creds or not creds.valid:
        # This should not happen in a web flow if the user is authenticated.
        # If it does, it means the credentials have expired or are invalid,
        # and the user needs to re-authenticate.
        # The API should handle this by returning a 401 Unauthorized error.
        # NOTE(review): credentials that expired but carry a refresh_token
        # could be refreshed here instead of forcing re-auth — TODO.
        return None
    return build('drive', 'v3', credentials=creds)
def upload_file(drive_service, file_path, folder_id=None):
    """Upload *file_path* to Drive, optionally inside *folder_id*.

    Returns the id of the newly created Drive file.
    """
    metadata = {'name': os.path.basename(file_path)}
    if folder_id:
        metadata['parents'] = [folder_id]
    media = MediaFileUpload(file_path, resumable=True)
    created = drive_service.files().create(
        body=metadata,
        media_body=media,
        fields='id',
    ).execute()
    return created.get('id')
def download_file(drive_service, file_id, destination):
    """Download a Drive file to *destination*, printing chunk progress."""
    request = drive_service.files().get_media(fileId=file_id)
    with open(destination, "wb") as out:
        downloader = MediaIoBaseDownload(out, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            print(f"Download {int(status.progress() * 100)}%.")
def authenticate(redirect_uri):
    """Start the OAuth flow; return (authorization_url, state).

    The caller must persist *state* (e.g. in the web session) and verify
    it when Google redirects back to *redirect_uri*.
    """
    oauth_flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, redirect_uri=redirect_uri
    )
    # authorization_url() already returns the (url, state) pair we need.
    return oauth_flow.authorization_url(
        access_type='offline', include_granted_scopes='true'
    )
def exchange_code_for_credentials(code, redirect_uri, state):
    """Exchange an OAuth authorization code for user credentials.

    BUGFIX: *state* was accepted but never used; it is now handed to the
    Flow so the library can check it against the callback, instead of
    relying solely on the manual comparison done by the caller.
    """
    flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, redirect_uri=redirect_uri, state=state
    )
    flow.fetch_token(code=code)
    return flow.credentials

View File

@@ -0,0 +1,65 @@
import os
import shutil
import hashlib
import tempfile
import secrets
from fastapi import UploadFile
from typing import List
from .. import models
CHUNK_SIZE = 1024 * 1024 * 5 # 5MB
async def save_temp_file(file: "UploadFile", chunk_size: int = None):
    """Stream an upload into a fresh temp dir; return (path, sha256 hex).

    SECURITY FIX: only the basename of the client-supplied filename is
    used, so a crafted name like ``../../x`` cannot write outside the
    temp directory. *chunk_size* overrides the module-level CHUNK_SIZE
    read granularity (mainly useful for tests).
    """
    if chunk_size is None:
        chunk_size = CHUNK_SIZE
    temp_dir = tempfile.mkdtemp()
    safe_name = os.path.basename(file.filename or "upload")
    file_path = os.path.join(temp_dir, safe_name)
    sha256_hash = hashlib.sha256()
    with open(file_path, "wb") as f:
        while chunk := await file.read(chunk_size):
            f.write(chunk)
            sha256_hash.update(chunk)
    return file_path, sha256_hash.hexdigest()
def split_file(file_path: str, accounts: "List[models.Account]", chunk_size: int = None):
    """Split *file_path* into one consecutive part per account, on disk.

    Every account gets roughly file_size // n bytes; the last part
    absorbs the remainder. Returns a list of dicts with the part index,
    path, size, owning account and SHA-256 digest.

    Fixes: the digest is now computed incrementally while writing (the
    original re-read each whole part via an unclosed file handle), and an
    empty *accounts* list raises ValueError instead of ZeroDivisionError.
    *chunk_size* overrides the module-level CHUNK_SIZE (useful in tests).
    """
    if not accounts:
        raise ValueError("split_file requires at least one account")
    if chunk_size is None:
        chunk_size = CHUNK_SIZE
    # Simplified splitting logic for now; a real implementation would
    # size each part by the account's available space.
    parts = []
    file_size = os.path.getsize(file_path)
    num_accounts = len(accounts)
    part_size = file_size // num_accounts
    with open(file_path, "rb") as src:
        for i, account in enumerate(accounts):
            part_path = f"{file_path}.part{i}"
            hasher = hashlib.sha256()
            with open(part_path, "wb") as dst:
                # The final part takes everything left over (including the
                # remainder of the integer division).
                remaining = part_size if i < num_accounts - 1 else None
                while remaining is None or remaining > 0:
                    read_size = chunk_size if remaining is None else min(chunk_size, remaining)
                    chunk = src.read(read_size)
                    if not chunk:
                        break
                    dst.write(chunk)
                    hasher.update(chunk)
                    if remaining is not None:
                        remaining -= len(chunk)
            parts.append({
                "index": i,
                "path": part_path,
                "size": os.path.getsize(part_path),
                "account": account,
                "sha256": hasher.hexdigest(),
            })
    return parts
def generate_token():
    """Return a cryptographically secure, URL-safe download token."""
    token = secrets.token_urlsafe(32)
    return token
def merge_files(part_paths: List[str], output_path: str):
    """Concatenate *part_paths*, in the order given, into *output_path*.

    BUGFIX: the original re-sorted the paths lexicographically, which
    corrupted any file with 10 or more parts (".part10" sorts before
    ".part2"). Ordering is the caller's responsibility — the download
    endpoint already passes paths sorted by part_index.
    """
    with open(output_path, 'wb') as f_out:
        for part_path in part_paths:
            with open(part_path, 'rb') as f_in:
                shutil.copyfileobj(f_in, f_out)

187
multidrive_box_prd.md Normal file
View File

@@ -0,0 +1,187 @@
# 🗃️ MultiDrive Box PRD (Product Requirements Document)
## 🔐 一、产品概述
**MultiDrive Box** 是一个前后端一体的系统,用于将多个 Google Drive 账号的免费空间联合成一个统一存储池,支持大文件智能分片上传、多账号分布存储、联合下载以及可共享访问。
用户只需上传文件,系统会自动完成压缩、分割、分发与元数据管理。下载者通过一个链接即可还原原始文件,无需了解后台逻辑。
---
## 🌟 二、核心功能需求
### 用户角度
#### 文件上传
- 登录并绑定多个 Google Drive 账号
- 拖拽或选择一个大文件上传
- 系统自动压缩(可选)、分片、上传到各账号
- 显示上传进度
- 上传成功后生成唯一分享链接
#### 文件下载
- 打开分享链接,显示文件名称、大小、分片信息
- 点击下载,系统联合各账号下载分片并合并
- 下载后校验文件整体性
#### 账号管理
- 添加/移除 Google 账号
- 查看每个账号剩余空间
- 查看每个账号中存在的文件分片
---
## 🛠️ 三、系统架构设计
```
用户浏览器
↕️ (HTTP)
Reflex 前端 (Web)
↕️
FastAPI 后端 (REST API)
↕️
SQLite 数据库 + Google Drive API
```
---
## ⚙️ 四、技术标准
| 组件 | 技术选型 |
| ---------------- | ------------------------------------- |
| 后端框架 | FastAPI |
| 数据库 | SQLite3 |
| 文件分片 | Python: shutil, os, hashlib, tempfile |
| Google Drive API | google-api-python-client |
| 前端框架 | Reflex (Python Web UI) |
| 授权管理 | Google OAuth2 |
| 部署 | Docker / 本地部署 / Cloudflare Tunnel |
---
## 📊 五、数据库设计 (SQLite)
### 1. users
```sql
id INTEGER PRIMARY KEY
email TEXT
created_at DATETIME
```
### 2. accounts
```sql
id INTEGER PRIMARY KEY
user_id INTEGER
google_email TEXT
access_token TEXT
refresh_token TEXT
token_expiry DATETIME
drive_space_total INTEGER
drive_space_used INTEGER
drive_space_reserved INTEGER DEFAULT 1073741824 -- 1GB 预留
```
### 3. files
```sql
id INTEGER PRIMARY KEY
user_id INTEGER
filename TEXT
original_size INTEGER
sha256 TEXT
upload_time DATETIME
download_token TEXT UNIQUE
```
### 4. file\_parts
```sql
id INTEGER PRIMARY KEY
file_id INTEGER
part_index INTEGER
size INTEGER
account_id INTEGER
drive_file_id TEXT
sha256 TEXT
```
---
## 🔐 六、API 设计 (FastAPI)
| Method | Endpoint | 描述 |
| ------ | -------------------------- | -------------------- |
| GET | /api/storage-status | 获取所有账号剩余空间 |
| POST | /api/upload-file | 文件上传与分片 |
| GET | /api/file/{token} | 获取分享文件元数据 |
| GET | /api/file/{token}/download | 联合下载文件 |
| POST | /api/add-account | 添加 Google 账号 (OAuth) |
| POST | /api/remove-account | 移除账号 |
---
## 🖼️ 七、Reflex 前端页面设计
### 1. 登录与账号管理
- 显示已绑定账号和空间
- 添加账号(Google OAuth2)
- 支持移除
### 2. 文件上传
- 拖拽上传区域
- 上传进度条
- 成功后显示分享链接
### 3. 分享页面
- 显示文件名、大小、分片数
- 点击下载合并文件
---
## 🧐 八、上传策略
### 分片规则
1. 获取所有账号剩余空间(减去预留的 1GB)
2. 按空间比例分片
3. 使用 Python tempfile 创建分片文件
4. 分别上传到 Google Drive (MultidriveBox/ 文件夹)
5. 记录分片 ID 和元数据
---
## 📁 九、下载策略
1. 通过分享链接加载分片元数据
2. 使用 access token 输出文件分片
3. 合并成完整文件
4. 下载前校验 SHA256
---
## 🦜 十、测试用例
- 上传 <14GB 文件,仅使用一个账号
- 上传 14.5GB 大文件,分片上传到多账号
- 没有剩余空间的规避策略
- 分享链接是否可用,不同浏览器是否适配
- 下载合并文件是否完整
- 删除账号后,文件是否仍可下载
---
## 🚀 十一、后续功能设想
- 多云支持Dropbox, OneDrive
- 分片加密 / 下载解密
- WebDAV 模拟磁盘
- 用户系统自动分配账号

6
reflex_app/.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
*.db
assets/external/
__pycache__/
*.py[cod]
.states
.web

View File

View File

@@ -0,0 +1,81 @@
import reflex as rx
import requests
API_URL = "http://127.0.0.1:8000"
class State(rx.State):
    """Frontend state: storage stats, pending uploads and the share link."""
    storage_status: dict = {}
    # BUGFIX: state vars need a default value; this one had none.
    upload_files: list[str] = []
    download_link: str = ""
    @rx.var
    def total_space_gb(self) -> str:
        """Pool total, formatted in GB."""
        total = self.storage_status.get("total_space", 0)
        return f"{total / (1024**3):.2f} GB"
    @rx.var
    def used_space_gb(self) -> str:
        """Pool usage, formatted in GB."""
        used = self.storage_status.get("used_space", 0)
        return f"{used / (1024**3):.2f} GB"
    @rx.var
    def free_space_gb(self) -> str:
        """Pool free space, formatted in GB."""
        free = self.storage_status.get("free_space", 0)
        return f"{free / (1024**3):.2f} GB"
    def get_storage_status(self):
        """Refresh storage_status from the backend API."""
        try:
            response = requests.get(f"{API_URL}/api/storage-status")
            response.raise_for_status()
            self.storage_status = response.json()
        except requests.exceptions.RequestException as e:
            print(f"Error fetching storage status: {e}")
    async def handle_upload(self, files: list[rx.UploadFile]):
        """POST each selected file to the backend; store the share link."""
        for file in files:
            upload_data = await file.read()
            # BUGFIX: the original rebound the name `files` here, shadowing
            # the parameter that is being iterated.
            payload = {'file': (file.filename, upload_data, file.content_type)}
            try:
                response = requests.post(f"{API_URL}/api/upload-file", files=payload)
                response.raise_for_status()
                self.download_link = response.json().get("download_link", "")
            except requests.exceptions.RequestException as e:
                print(f"Error uploading file: {e}")
def index():
    """Single page UI: storage stats, account linking, upload and share link."""
    return rx.container(
        rx.heading("MultiDrive Box", size="9"),
        rx.hstack(
            rx.button("Refresh Status", on_click=State.get_storage_status),
            rx.link(
                rx.button("Add Account"),
                # The backend endpoint redirects straight to Google's
                # OAuth consent page.
                href=f"{API_URL}/api/add-account",
                is_external=True,
            ),
        ),
        rx.box(
            rx.text(f"Total Space: {State.total_space_gb}"),
            rx.text(f"Used Space: {State.used_space_gb}"),
            rx.text(f"Free Space: {State.free_space_gb}"),
        ),
        rx.upload(
            rx.text("Drag and drop files here or click to select files."),
            id="upload",
        ),
        rx.button(
            # rx.upload_files collects whatever was dropped into the
            # "upload" zone above.
            "Upload", on_click=State.handle_upload(rx.upload_files(upload_id="upload"))
        ),
        rx.cond(
            State.download_link,
            rx.link(
                "Download File",
                # download_link is a backend-relative path like /api/file/<token>.
                href=API_URL + State.download_link,
                is_external=True
            )
        ),
        padding="2em",
    )
# Create the Reflex app and register the single page at "/".
app = rx.App()
app.add_page(index)

View File

@@ -0,0 +1,2 @@
reflex==0.8.0

5
reflex_app/rxconfig.py Normal file
View File

@@ -0,0 +1,5 @@
import reflex as rx
config = rx.Config(
    # Must match the package directory name so `reflex run` finds the app.
    app_name="reflex_app",
)

9
requirements.txt Normal file
View File

@@ -0,0 +1,9 @@
fastapi
uvicorn[standard]
sqlalchemy
google-api-python-client
google-auth-httplib2
google-auth-oauthlib
reflex
python-dotenv
itsdangerous

0
tests/__init__.py Normal file
View File

0
tests/test_main.py Normal file
View File