add accounts

This commit is contained in:
Julian Freeman
2025-07-03 19:18:36 -04:00
commit 85b97a626c
20 changed files with 965 additions and 0 deletions

0
app/__init__.py Normal file
View File

58
app/crud.py Normal file
View File

@@ -0,0 +1,58 @@
from sqlalchemy.orm import Session
from . import models, schemas
def get_user(db: Session, user_id: int):
    """Fetch a single user by primary key; None when no row matches."""
    query = db.query(models.User).filter(models.User.id == user_id)
    return query.first()
def get_user_by_email(db: Session, email: str):
    """Look a user up by email address; None when no row matches."""
    query = db.query(models.User).filter(models.User.email == email)
    return query.first()
def create_user(db: Session, user: schemas.UserCreate):
    """Persist a new User row and return the refreshed ORM instance."""
    record = models.User(email=user.email)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def get_account(db: Session, account_id: int):
    """Fetch one linked Drive account by primary key, or None."""
    query = db.query(models.Account).filter(models.Account.id == account_id)
    return query.first()
def get_accounts(db: Session, user_id: int):
    """Return every Drive account owned by *user_id* (possibly empty)."""
    query = db.query(models.Account).filter(models.Account.user_id == user_id)
    return query.all()
def create_account(db: Session, account: schemas.AccountCreate, user_id: int):
    """Persist a newly linked Drive account owned by *user_id*."""
    payload = account.dict()
    record = models.Account(user_id=user_id, **payload)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def get_file_by_token(db: Session, token: str):
    """Resolve a stored file by its download token, or None."""
    query = db.query(models.File).filter(models.File.download_token == token)
    return query.first()
def create_file(db: Session, file: schemas.FileCreate, user_id: int, parts: list):
    """Create a File row plus one FilePart row per uploaded chunk.

    FIX: the original committed the File row before inserting its parts,
    so a failure mid-loop left an orphaned file with no parts. Using
    flush() to obtain the generated id keeps file + parts in a single
    transaction with one commit at the end.

    parts: list of dicts with keys part_index, size, account_id,
    drive_file_id, sha256 (as produced by the upload endpoint).
    """
    db_file = models.File(
        user_id=user_id,
        filename=file.filename,
        original_size=file.original_size,
        sha256=file.sha256,
        download_token=file.download_token
    )
    db.add(db_file)
    # Flush (not commit) so db_file.id is assigned while the transaction
    # stays open for the part rows below.
    db.flush()
    for part_data in parts:
        db.add(models.FilePart(
            file_id=db_file.id,
            part_index=part_data['part_index'],
            size=part_data['size'],
            account_id=part_data['account_id'],
            drive_file_id=part_data['drive_file_id'],
            sha256=part_data['sha256']
        ))
    db.commit()
    db.refresh(db_file)
    return db_file

13
app/database.py Normal file
View File

@@ -0,0 +1,13 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .models import Base
# SQLite database file created in the process working directory.
DATABASE_URL = "sqlite:///./multidrive.db"

# check_same_thread=False is needed because FastAPI may service a request
# on a different thread than the one that opened the SQLite connection.
engine = create_engine(
    DATABASE_URL, connect_args={"check_same_thread": False}
)

# Session factory; one SessionLocal() is opened per request by the
# get_db dependency and closed when the request finishes.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def init_db():
    """Create all tables declared on Base (no-op for tables that exist)."""
    Base.metadata.create_all(bind=engine)

200
app/main.py Normal file
View File

@@ -0,0 +1,200 @@
from fastapi import FastAPI, Depends, HTTPException, UploadFile, File as FastAPIFile, Request
from fastapi.responses import StreamingResponse, RedirectResponse
from sqlalchemy.orm import Session
from . import crud, models, schemas
from .database import SessionLocal, init_db
from .services import drive, file_handler
from starlette.middleware.sessions import SessionMiddleware
from fastapi.middleware.cors import CORSMiddleware
from dotenv import load_dotenv
import os
# Load variables (SECRET_KEY, Google OAuth config, ...) from a local .env.
load_dotenv()

app = FastAPI(
    title="MultiDrive Box",
    description="Union de varias cuentas de Google Drive para formar un único pool de almacenamiento.",
    version="0.1.0"
)

# Add CORS middleware to allow cross-origin requests
# This is crucial for the frontend (e.g., running on port 3000)
# to communicate with the backend (running on port 8000).
origins = [
    "http://localhost:3000",
    "http://127.0.0.1:3000",
]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,  # Allow cookies to be sent and received
    allow_methods=["*"],
    allow_headers=["*"],
)

# Add session middleware with a fixed secret key.
# The OAuth flow stores its CSRF `state` in this signed session cookie,
# so the app refuses to boot without a SECRET_KEY.
SECRET_KEY = os.getenv("SECRET_KEY")
if not SECRET_KEY:
    raise ValueError("No SECRET_KEY set for session middleware.")
app.add_middleware(SessionMiddleware, secret_key=SECRET_KEY)
# Dependency
def get_db():
    """FastAPI dependency: yield a request-scoped session, always close it."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
@app.on_event("startup")
def on_startup():
    # Create database tables on boot.
    # NOTE(review): on_event is deprecated in newer FastAPI in favour of
    # lifespan handlers — confirm the targeted FastAPI version.
    init_db()
@app.get("/api/storage-status", response_model=schemas.StorageStatus)
def get_storage_status(db: Session = Depends(get_db)):
    """Aggregate total/used/free quota across every linked Drive account."""
    # Dummy user for now — auth is not implemented yet.
    user = crud.get_user(db, 1)
    if user is None:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    accounts = crud.get_accounts(db, user_id=user.id)
    total = sum(acc.drive_space_total for acc in accounts)
    used = sum(acc.drive_space_used for acc in accounts)
    return {
        "total_space": total,
        "used_space": used,
        "free_space": total - used,
        "accounts": accounts,
    }
@app.post("/api/upload-file")
async def upload_file(file: UploadFile = FastAPIFile(...), db: Session = Depends(get_db)):
    """Split an uploaded file across the linked Drive accounts.

    Saves the upload to a temp dir, splits it into one part per account,
    uploads each part, records metadata, and returns a download link.

    FIXES vs. original:
      * `drive.upload_file(service, path)` is a module-level helper in
        services.drive, not a method on the googleapiclient service
        object; the old call also passed the account where the optional
        folder id belongs.
      * original_size is taken from the saved temp file — UploadFile.size
        can be None depending on the client.
      * temp files (original + parts) are removed when done or on error.
    """
    # Dummy user — auth is not implemented yet.
    user = crud.get_user(db, 1)
    if not user:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    accounts = crud.get_accounts(db, user_id=user.id)
    if not accounts:
        raise HTTPException(status_code=400, detail="No Google Drive accounts linked.")
    file_path, sha256 = await file_handler.save_temp_file(file)
    try:
        parts = file_handler.split_file(file_path, accounts)
        uploaded_parts = []
        for part in parts:
            drive_service = drive.get_drive_service(credentials_info=part['account'].credentials)
            if not drive_service:
                raise HTTPException(status_code=401, detail=f"Could not get drive service for account {part['account'].google_email}")
            # BUG FIX: call the module-level helper with the service object.
            drive_file_id = drive.upload_file(drive_service, part['path'])
            uploaded_parts.append({
                "part_index": part['index'],
                "size": part['size'],
                "account_id": part['account'].id,
                "drive_file_id": drive_file_id,
                "sha256": part['sha256']
            })
        download_token = file_handler.generate_token()
        file_data = schemas.FileCreate(
            filename=file.filename,
            original_size=os.path.getsize(file_path),
            sha256=sha256,
            download_token=download_token
        )
        crud.create_file(db, file=file_data, user_id=user.id, parts=uploaded_parts)
    finally:
        # save_temp_file puts the upload (and split_file its parts) in a
        # dedicated temp directory — remove it whether we succeeded or not.
        shutil.rmtree(os.path.dirname(file_path), ignore_errors=True)
    return {"download_link": f"/api/file/{download_token}"}
@app.get("/api/file/{token}", response_model=schemas.File)
def get_file_metadata(token: str, db: Session = Depends(get_db)):
    """Return stored metadata for the file behind a download token."""
    record = crud.get_file_by_token(db, token)
    if record is None:
        raise HTTPException(status_code=404, detail="File not found")
    return record
from fastapi.responses import StreamingResponse
import shutil
import tempfile
@app.get("/api/file/{token}/download")
def download_file(token: str, db: Session = Depends(get_db)):
    """Reassemble a split file from Drive and stream it to the client.

    Downloads every part (ordered by part_index) into a temp dir, merges
    them, and streams the merged file.

    BUG FIX: the original removed the temp dir *before* StreamingResponse
    iterated the file, deleting the merged file out from under the
    response. Cleanup now happens after the last chunk is yielded (or on
    any error before streaming begins).
    """
    db_file = crud.get_file_by_token(db, token)
    if not db_file:
        raise HTTPException(status_code=404, detail="File not found")
    temp_dir = tempfile.mkdtemp()
    try:
        part_paths = []
        for part in sorted(db_file.parts, key=lambda p: p.part_index):
            account = crud.get_account(db, part.account_id)
            drive_service = drive.get_drive_service(credentials_info=account.credentials)
            if not drive_service:
                raise HTTPException(status_code=401, detail=f"Could not get drive service for account {account.google_email}")
            part_path = os.path.join(temp_dir, f"{db_file.filename}.part{part.part_index}")
            drive.download_file(drive_service, part.drive_file_id, part_path)
            part_paths.append(part_path)
        merged_file_path = os.path.join(temp_dir, db_file.filename)
        file_handler.merge_files(part_paths, merged_file_path)
    except Exception:
        shutil.rmtree(temp_dir, ignore_errors=True)
        raise

    def file_iterator(file_path):
        # Stream the merged file; clean up only once streaming finishes.
        try:
            with open(file_path, 'rb') as f:
                yield from f
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)

    return StreamingResponse(file_iterator(merged_file_path), media_type="application/octet-stream", headers={"Content-Disposition": f"attachment; filename={db_file.filename}"})
@app.get("/api/add-account")
def add_account(request: Request):
    """Start the Google OAuth flow and redirect the browser to Google.

    Stores the CSRF `state` in the signed session cookie; the
    /api/oauth2callback endpoint verifies it on return.
    """
    print("Received request for /api/add-account")
    try:
        redirect_uri = request.url_for('oauth2callback')
        print(f"Redirect URI for oauth2callback: {redirect_uri}")
        authorization_url, state = drive.authenticate(redirect_uri)
        print(f"Generated authorization URL: {authorization_url}")
        request.session['state'] = state
        print(f"Stored state in session: {state}")
        # Directly redirect the user's browser to Google's authorization page
        return RedirectResponse(authorization_url)
    except Exception as e:
        # Broad catch: any failure here is surfaced as a generic 500 after
        # logging. NOTE(review): prints should become logger calls.
        print(f"Error in /api/add-account: {e}")
        raise HTTPException(status_code=500, detail="Internal server error")
@app.get("/api/oauth2callback")
def oauth2callback(request: Request, code: str, state: str, db: Session = Depends(get_db)):
    """Complete the OAuth flow: verify state, store credentials and quota.

    Exchanges the authorization code for credentials, queries the Drive
    "about" endpoint for the account email and storage quota, persists the
    account, then redirects back to the app root.

    FIX: get_drive_service returns None when the freshly exchanged
    credentials are invalid; the original then crashed with an
    AttributeError on `.about()`. Surface that case as a 401 instead.
    """
    session_state = request.session.get('state')
    print(f"Callback received. State from Google: {state}, State from session: {session_state}")
    if not session_state or state != session_state:
        print("State mismatch error!")
        print(f"Session content: {request.session}")
        raise HTTPException(status_code=400, detail="State mismatch")
    redirect_uri = request.url_for('oauth2callback')
    credentials = drive.exchange_code_for_credentials(code, redirect_uri, state)
    drive_service = drive.get_drive_service(credentials_info=credentials.to_json())
    if not drive_service:
        raise HTTPException(status_code=401, detail="Could not build Drive service from new credentials")
    about = drive_service.about().get(fields="user, storageQuota").execute()
    user_info = about['user']
    storage_quota = about['storageQuota']
    # Dummy user — auth is not implemented yet.
    user = crud.get_user(db, 1)
    if not user:
        user = crud.create_user(db, schemas.UserCreate(email="dummy@example.com"))
    account_data = schemas.AccountCreate(
        google_email=user_info['emailAddress'],
        credentials=credentials.to_json(),
        # 'limit' is absent for unlimited plans — defaults to 0 here.
        drive_space_total=int(storage_quota.get('limit', 0)),
        drive_space_used=int(storage_quota.get('usage', 0))
    )
    crud.create_account(db, account=account_data, user_id=user.id)
    return RedirectResponse(url="/")

50
app/models.py Normal file
View File

@@ -0,0 +1,50 @@
from sqlalchemy import create_engine, Column, Integer, String, DateTime, ForeignKey, BigInteger, JSON
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
import datetime
# Declarative base shared by every ORM model in this module.
# NOTE(review): sqlalchemy.ext.declarative.declarative_base is deprecated
# in SQLAlchemy 2.x (moved to sqlalchemy.orm) — confirm target version.
Base = declarative_base()


class User(Base):
    """Application user; owns linked Drive accounts and uploaded files."""
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    email = Column(String, unique=True, index=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)

    accounts = relationship("Account", back_populates="user")
    files = relationship("File", back_populates="user")
class Account(Base):
    """A linked Google Drive account contributing space to the pool."""
    __tablename__ = "accounts"

    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    google_email = Column(String, unique=True, index=True)
    # OAuth credentials serialized as JSON (stored via Credentials.to_json()).
    credentials = Column(JSON)
    # Quota figures in bytes, as reported by the Drive "about" endpoint.
    drive_space_total = Column(BigInteger)
    drive_space_used = Column(BigInteger)
    drive_space_reserved = Column(BigInteger, default=1073741824)  # 1GB reserved

    user = relationship("User", back_populates="accounts")
    file_parts = relationship("FilePart", back_populates="account")
class File(Base):
    """A logical uploaded file, stored as one or more parts across accounts."""
    __tablename__ = "files"

    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.id"))
    filename = Column(String)
    # Size in bytes and SHA-256 of the complete (pre-split) file.
    original_size = Column(BigInteger)
    sha256 = Column(String)
    upload_time = Column(DateTime, default=datetime.datetime.utcnow)
    # Opaque random token used in the public download URL.
    download_token = Column(String, unique=True, index=True)

    user = relationship("User", back_populates="files")
    parts = relationship("FilePart", back_populates="file")
class FilePart(Base):
    """One chunk of a File, stored on a specific Drive account."""
    __tablename__ = "file_parts"

    id = Column(Integer, primary_key=True, index=True)
    file_id = Column(Integer, ForeignKey("files.id"))
    # 0-based position of this chunk within the original file.
    part_index = Column(Integer)
    size = Column(BigInteger)
    account_id = Column(Integer, ForeignKey("accounts.id"))
    # Id of the uploaded chunk inside the owning Google Drive account.
    drive_file_id = Column(String)
    sha256 = Column(String)

    file = relationship("File", back_populates="parts")
    account = relationship("Account", back_populates="file_parts")

78
app/schemas.py Normal file
View File

@@ -0,0 +1,78 @@
from pydantic import BaseModel
from typing import List, Optional, Any
import datetime
class AccountBase(BaseModel):
    """Fields shared by every Account schema variant."""
    google_email: str


class AccountCreate(AccountBase):
    """Payload used when persisting a newly linked Drive account."""
    credentials: Any  # Can be a dict or a JSON string
    drive_space_total: int
    drive_space_used: int


class Account(AccountBase):
    """Account read model, built from ORM rows (from_attributes)."""
    id: int
    user_id: int
    # NOTE(review): exposing raw OAuth credentials through the API response
    # looks risky — confirm this is intentional.
    credentials: Any
    drive_space_total: int
    drive_space_used: int
    drive_space_reserved: int

    class Config:
        from_attributes = True
class FilePartBase(BaseModel):
    """Fields shared by every FilePart schema variant."""
    part_index: int
    size: int
    drive_file_id: str
    sha256: str


class FilePartCreate(FilePartBase):
    """Payload for creating a part (no extra fields today)."""
    pass


class FilePart(FilePartBase):
    """FilePart read model, built from ORM rows (from_attributes)."""
    id: int
    file_id: int
    account_id: int

    class Config:
        from_attributes = True
class FileBase(BaseModel):
    """Fields shared by every File schema variant."""
    filename: str
    original_size: int
    sha256: str


class FileCreate(FileBase):
    """Payload for persisting an uploaded file (token is pre-generated)."""
    download_token: str


class File(FileBase):
    """File read model including its parts (from_attributes)."""
    id: int
    user_id: int
    upload_time: datetime.datetime
    download_token: str
    parts: List[FilePart] = []

    class Config:
        from_attributes = True
class UserBase(BaseModel):
    """Fields shared by every User schema variant."""
    email: str


class UserCreate(UserBase):
    """Payload for creating a user (no extra fields today)."""
    pass


class User(UserBase):
    """User read model including accounts and files (from_attributes)."""
    id: int
    created_at: datetime.datetime
    accounts: List[Account] = []
    files: List[File] = []

    class Config:
        from_attributes = True
class StorageStatus(BaseModel):
    """Aggregated pool quota returned by /api/storage-status (bytes)."""
    total_space: int
    used_space: int
    free_space: int
    accounts: List[Account]

0
app/services/__init__.py Normal file
View File

72
app/services/drive.py Normal file
View File

@@ -0,0 +1,72 @@
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
import os
import json
# This file should be downloaded from Google Cloud Console
CLIENT_SECRETS_FILE = "credentials.json"
SCOPES = ["https://www.googleapis.com/auth/drive.file"]


def get_drive_service(credentials_info=None):
    """Build a Drive v3 client from stored credentials.

    Accepts either a dict or a JSON string (as stored in the DB).
    Returns None when there are no credentials or they are no longer
    valid — the API layer turns that into a 401 so the user can
    re-authenticate.
    """
    if isinstance(credentials_info, str):
        credentials_info = json.loads(credentials_info)
    if not credentials_info:
        return None
    creds = Credentials.from_authorized_user_info(credentials_info, SCOPES)
    if not creds.valid:
        # Expired or otherwise unusable credentials: caller must trigger
        # a fresh OAuth flow.
        return None
    return build('drive', 'v3', credentials=creds)
def upload_file(drive_service, file_path, folder_id=None):
    """Upload *file_path* to Drive and return the created file's id.

    When *folder_id* is given the file is placed inside that folder,
    otherwise it lands in the Drive root.
    """
    metadata = {
        'name': os.path.basename(file_path)
    }
    if folder_id:
        metadata['parents'] = [folder_id]
    media = MediaFileUpload(file_path, resumable=True)
    created = drive_service.files().create(
        body=metadata,
        media_body=media,
        fields='id'
    ).execute()
    return created.get('id')
def download_file(drive_service, file_id, destination):
    """Stream a Drive file's bytes into *destination* on local disk."""
    request = drive_service.files().get_media(fileId=file_id)
    with open(destination, "wb") as fh:
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            print(f"Download {int(status.progress() * 100)}%.")
def authenticate(redirect_uri):
    """Begin the OAuth web flow; return (authorization_url, state).

    The caller stores `state` (e.g. in the session) and must verify it
    when the OAuth callback arrives.
    """
    flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, redirect_uri=redirect_uri
    )
    # access_type=offline requests a refresh token so the linked account
    # keeps working after the first access token expires.
    url_and_state = flow.authorization_url(
        access_type='offline', include_granted_scopes='true'
    )
    return url_and_state
def exchange_code_for_credentials(code, redirect_uri, state):
    """Exchange an OAuth authorization code for user credentials.

    FIX: the original accepted *state* but never used it — the flow was
    reconstructed without it, so the library could not bind the token
    exchange to the original authorization request. Passing `state=` to
    Flow.from_client_secrets_file closes that gap (google-auth-oauthlib
    verifies it during fetch_token).
    """
    flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, redirect_uri=redirect_uri, state=state
    )
    flow.fetch_token(code=code)
    return flow.credentials

View File

@@ -0,0 +1,65 @@
import os
import shutil
import hashlib
import tempfile
import secrets
from fastapi import UploadFile
from typing import List
from .. import models
# Read/write granularity for streaming uploads to disk.
CHUNK_SIZE = 1024 * 1024 * 5  # 5MB


async def save_temp_file(file: "UploadFile"):
    """Stream an upload into a fresh temp dir; return (path, sha256 hex).

    SECURITY FIX: the client-supplied filename is reduced to its basename
    before being joined into the path, so a crafted name such as
    `../../etc/cron.d/x` cannot escape the temp directory.
    """
    temp_dir = tempfile.mkdtemp()
    safe_name = os.path.basename(file.filename) or "upload"
    file_path = os.path.join(temp_dir, safe_name)
    sha256_hash = hashlib.sha256()
    with open(file_path, "wb") as f:
        while chunk := await file.read(CHUNK_SIZE):
            f.write(chunk)
            sha256_hash.update(chunk)
    return file_path, sha256_hash.hexdigest()
def split_file(file_path: str, accounts: "List[models.Account]"):
    """Split *file_path* into len(accounts) consecutive parts on disk.

    Each account gets file_size // n bytes except the last, which absorbs
    the remainder. Returns a list of dicts per part: index, path, size,
    owning account, sha256.

    FIXES vs. original:
      * the part's sha256 was computed by re-reading the whole part via an
        `open(...).read()` whose handle was never closed — hash while
        writing instead (no re-read, no leaked fd);
      * an empty accounts list caused ZeroDivisionError — raise a clear
        ValueError instead.

    NOTE: splitting is still naive — it ignores each account's free space.
    """
    if not accounts:
        raise ValueError("split_file requires at least one account")
    chunk_size = 1024 * 1024 * 5  # same 5MB read granularity as uploads
    parts = []
    file_size = os.path.getsize(file_path)
    num_accounts = len(accounts)
    part_size = file_size // num_accounts
    with open(file_path, "rb") as src:
        for i, account in enumerate(accounts):
            part_path = f"{file_path}.part{i}"
            digest = hashlib.sha256()
            written = 0
            # The last part has no size cap: it absorbs the remainder.
            limit = part_size if i < num_accounts - 1 else None
            with open(part_path, "wb") as dst:
                while limit is None or written < limit:
                    to_read = chunk_size if limit is None else min(chunk_size, limit - written)
                    chunk = src.read(to_read)
                    if not chunk:
                        break
                    dst.write(chunk)
                    digest.update(chunk)
                    written += len(chunk)
            parts.append({
                "index": i,
                "path": part_path,
                "size": written,
                "account": account,
                "sha256": digest.hexdigest(),
            })
    return parts
def generate_token():
    """Return a cryptographically random, URL-safe download token."""
    token = secrets.token_urlsafe(32)
    return token
def merge_files(part_paths: List[str], output_path: str):
    """Concatenate *part_paths*, in the order given, into *output_path*.

    BUG FIX: the original re-sorted the paths lexicographically, which
    breaks files split into 10 or more parts ("x.part10" sorts before
    "x.part2"). The caller already supplies paths ordered by part_index,
    so that order is preserved here.
    """
    with open(output_path, 'wb') as merged:
        for part_path in part_paths:
            with open(part_path, 'rb') as part:
                shutil.copyfileobj(part, merged)