add accounts

Julian Freeman
2025-07-03 19:18:36 -04:00
commit 85b97a626c
20 changed files with 965 additions and 0 deletions

app/services/__init__.py (new, empty file)

app/services/drive.py (new file, 72 lines)

@@ -0,0 +1,72 @@
import os
import json

from google.oauth2.credentials import Credentials
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import Flow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload

# OAuth client configuration, downloaded from the Google Cloud Console.
CLIENT_SECRETS_FILE = "credentials.json"
SCOPES = ["https://www.googleapis.com/auth/drive.file"]
def get_drive_service(credentials_info=None):
    if isinstance(credentials_info, str):
        credentials_info = json.loads(credentials_info)
    creds = None
    if credentials_info:
        creds = Credentials.from_authorized_user_info(credentials_info, SCOPES)
    # Try to refresh expired credentials before giving up.
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    if not creds or not creds.valid:
        # Credentials are missing, expired, or revoked; the API layer should
        # return 401 Unauthorized so the user can re-authenticate.
        return None
    return build('drive', 'v3', credentials=creds)
def upload_file(drive_service, file_path, folder_id=None):
    # Upload a local file, optionally into a specific folder, and return its ID.
    file_metadata = {'name': os.path.basename(file_path)}
    if folder_id:
        file_metadata['parents'] = [folder_id]
    media = MediaFileUpload(file_path, resumable=True)
    file = drive_service.files().create(
        body=file_metadata,
        media_body=media,
        fields='id',
    ).execute()
    return file.get('id')
def download_file(drive_service, file_id, destination):
    # Stream the file contents to disk in chunks.
    request = drive_service.files().get_media(fileId=file_id)
    with open(destination, "wb") as fh:
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            print(f"Download {int(status.progress() * 100)}%.")
def authenticate(redirect_uri):
    flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, redirect_uri=redirect_uri
    )
    authorization_url, state = flow.authorization_url(
        access_type='offline', include_granted_scopes='true'
    )
    # The caller must store `state` (e.g. in the session) and verify it in
    # the OAuth callback to protect against CSRF.
    return authorization_url, state


def exchange_code_for_credentials(code, redirect_uri, state):
    # Passing the stored `state` back into the flow lets the library reject
    # a callback whose state does not match the original request.
    flow = Flow.from_client_secrets_file(
        CLIENT_SECRETS_FILE, SCOPES, state=state, redirect_uri=redirect_uri
    )
    flow.fetch_token(code=code)
    return flow.credentials
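
For context, a minimal sketch of how these helpers could be wired into FastAPI routes. The route paths, the REDIRECT_URI value, and the session-based state storage are illustrative assumptions, not part of this commit:

from fastapi import FastAPI, HTTPException, Request
from app.services import drive

app = FastAPI()
REDIRECT_URI = "https://example.com/oauth2callback"  # assumed callback URL

@app.get("/auth")
def start_auth(request: Request):
    url, state = drive.authenticate(redirect_uri=REDIRECT_URI)
    # Assumes Starlette's SessionMiddleware is installed on the app.
    request.session["oauth_state"] = state
    return {"authorization_url": url}

@app.get("/oauth2callback")
def finish_auth(request: Request, code: str, state: str):
    if state != request.session.get("oauth_state"):
        raise HTTPException(status_code=400, detail="OAuth state mismatch")
    creds = drive.exchange_code_for_credentials(code, REDIRECT_URI, state)
    # creds.to_json() can be persisted and later passed to get_drive_service().
    return {"credentials": creds.to_json()}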

@@ -0,0 +1,65 @@
import os
import shutil
import hashlib
import tempfile
import secrets
from typing import List

from fastapi import UploadFile

from .. import models

CHUNK_SIZE = 1024 * 1024 * 5  # 5 MB
async def save_temp_file(file: UploadFile):
    # Stream the upload into a fresh temp directory, hashing as we go.
    temp_dir = tempfile.mkdtemp()
    # basename() strips any client-supplied path components (path traversal).
    file_path = os.path.join(temp_dir, os.path.basename(file.filename or "upload"))
    sha256_hash = hashlib.sha256()
    with open(file_path, "wb") as f:
        while chunk := await file.read(CHUNK_SIZE):
            f.write(chunk)
            sha256_hash.update(chunk)
    return file_path, sha256_hash.hexdigest()
def split_file(file_path: str, accounts: List[models.Account]):
    # Simplified splitting: equal-sized parts, one per account. A real
    # implementation would split based on each account's available space.
    parts = []
    file_size = os.path.getsize(file_path)
    num_accounts = len(accounts)
    part_size = file_size // num_accounts
    with open(file_path, "rb") as f:
        for i, account in enumerate(accounts):
            part_path = f"{file_path}.part{i}"
            part_sha256 = hashlib.sha256()
            with open(part_path, "wb") as part_f:
                # The last part takes whatever remains (None = read to EOF).
                remaining = part_size if i < num_accounts - 1 else None
                while remaining is None or remaining > 0:
                    read_size = CHUNK_SIZE if remaining is None else min(CHUNK_SIZE, remaining)
                    chunk = f.read(read_size)
                    if not chunk:
                        break
                    part_f.write(chunk)
                    part_sha256.update(chunk)
                    if remaining is not None:
                        remaining -= len(chunk)
            parts.append({
                "index": i,
                "path": part_path,
                "size": os.path.getsize(part_path),
                "account": account,
                "sha256": part_sha256.hexdigest(),
            })
    return parts
def generate_token():
    return secrets.token_urlsafe(32)


def merge_files(part_paths: List[str], output_path: str):
    # Sort numerically on the ".part<N>" suffix; a plain lexicographic sort
    # would put "part10" before "part2".
    def part_index(path: str) -> int:
        return int(path.rsplit(".part", 1)[-1])

    with open(output_path, 'wb') as f_out:
        for part_path in sorted(part_paths, key=part_index):
            with open(part_path, 'rb') as f_in:
                shutil.copyfileobj(f_in, f_out)
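
A minimal sketch of the intended round trip, assuming an UploadFile and a list of models.Account as inputs (for a 13-byte file and 3 accounts, part_size is 13 // 3 = 4, so the parts are 4, 4, and 5 bytes). The function name and "restored.bin" path are assumptions for illustration, not part of this commit:

async def round_trip_demo(upload: UploadFile, accounts: List[models.Account]):
    # Save the upload, split it across accounts, then reassemble and verify.
    path, digest = await save_temp_file(upload)
    parts = split_file(path, accounts)
    # ... upload each part["path"] to its part["account"] via drive.upload_file ...
    merge_files([p["path"] for p in parts], "restored.bin")
    with open("restored.bin", "rb") as f:
        assert hashlib.sha256(f.read()).hexdigest() == digest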