Initial commit

This commit is contained in:
Peter Stockings
2026-01-01 16:11:06 +11:00
commit 8bcf8a43fe
46 changed files with 3722 additions and 0 deletions

View File

@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Torrent Client</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

View File

@@ -0,0 +1,35 @@
{
"name": "@torrent-client/client",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
"preview": "vite preview"
},
"dependencies": {
"@torrent-client/shared": "*",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"lucide-react": "^0.284.0",
"framer-motion": "^10.16.4",
"clsx": "^2.0.0",
"tailwind-merge": "^1.14.0"
},
"devDependencies": {
"@types/react": "^18.2.15",
"@types/react-dom": "^18.2.7",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@vitejs/plugin-react": "^4.0.3",
"autoprefixer": "^10.4.14",
"eslint": "^8.45.0",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.3",
"postcss": "^8.4.27",
"tailwindcss": "^3.3.3",
"vite": "^4.4.5"
}
}

View File

@@ -0,0 +1,6 @@
// PostCSS pipeline: Tailwind runs first (expands @tailwind directives into
// utilities), then Autoprefixer adds vendor prefixes per the browserslist.
// Empty objects = default options for both plugins.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

348
packages/client/src/App.tsx Normal file
View File

@@ -0,0 +1,348 @@
import { useState, useCallback, useEffect } from 'react'
import { Magnet, Info, CheckCircle2, Search, Activity, Network, FileText, Globe, ArrowRight, Trash2, Plus, Zap, Shield, BarChart3, DownloadCloud, Box, Layers, Play } from 'lucide-react'
import { motion, AnimatePresence } from 'framer-motion'
// Shape of one torrent session as returned by the server's GET /api/torrents
// endpoint (see TorrentSession.toJSON() on the server side).
interface TorrentSession {
  hash: string;                  // hex info-hash; unique id used in API routes
  name: string;                  // display name (magnet dn= or first file name)
  status: 'discovering' | 'ready' | 'downloading' | 'paused' | 'completed' | 'error';
  progress: number;              // integer percent, 0-100
  peers: number;                 // total peers discovered via trackers
  activeConnections: number;     // currently connected peer workers
  files: { name: string, size: number }[]; // size in bytes
  errorMessage: string | null;   // set when status === 'error'
}
function App() {
const [magnetInput, setMagnetInput] = useState('')
const [error, setError] = useState<string | null>(null)
const [torrents, setTorrents] = useState<TorrentSession[]>([])
const [isAdding, setIsAdding] = useState(false)
const fetchTorrents = useCallback(async () => {
try {
const response = await fetch('/api/torrents')
if (response.ok) {
const data = await response.json()
setTorrents(data)
}
} catch (err) {
console.error('Failed to fetch torrents:', err)
}
}, [])
useEffect(() => {
fetchTorrents()
const interval = setInterval(fetchTorrents, 2000)
return () => clearInterval(interval)
}, [fetchTorrents])
const handleAddTorrent = async () => {
if (!magnetInput.trim()) return
setError(null)
try {
const response = await fetch('/api/torrents', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ magnetURI: magnetInput.trim() })
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to add torrent')
}
setMagnetInput('')
setIsAdding(false)
fetchTorrents()
} catch (err: any) {
setError(err.message)
}
}
const startDownload = async (hash: string) => {
await fetch(`/api/torrents/${hash}/start`, { method: 'POST' })
fetchTorrents()
}
const removeTorrent = async (hash: string) => {
await fetch(`/api/torrents/${hash}`, { method: 'DELETE' })
fetchTorrents()
}
const getStatusColor = (status: string) => {
switch (status) {
case 'completed': return 'text-emerald-400 bg-emerald-500/10 border-emerald-500/20';
case 'downloading': return 'text-blue-400 bg-blue-500/10 border-blue-500/20';
case 'error': return 'text-rose-400 bg-rose-500/10 border-rose-500/20';
case 'paused': return 'text-amber-400 bg-amber-500/10 border-amber-500/20';
default: return 'text-slate-400 bg-slate-500/10 border-slate-500/20';
}
}
return (
<div className="w-full max-w-7xl px-6 py-12 selection:bg-blue-500/30">
{/* Header */}
<header className="flex items-center justify-between mb-16">
<div className="flex items-center gap-4">
<div className="relative group">
<div className="absolute inset-0 bg-blue-600 blur-xl opacity-20 group-hover:opacity-40 transition-opacity duration-500" />
<div className="relative w-12 h-12 bg-gradient-to-br from-blue-600 to-indigo-600 rounded-2xl flex items-center justify-center shadow-2xl shadow-blue-500/20 border border-white/10 group-hover:scale-105 transition-transform duration-300">
<Zap className="text-white fill-white" size={24} />
</div>
</div>
<div>
<h1 className="text-3xl font-black tracking-tight text-white mb-1">
GRAVITY <span className="text-transparent bg-clip-text bg-gradient-to-r from-blue-400 to-indigo-400">TORRENT</span>
</h1>
<div className="flex items-center gap-2 text-[11px] font-bold text-slate-500 tracking-widest uppercase">
<span className="w-1.5 h-1.5 rounded-full bg-emerald-500 shadow-[0_0_8px_rgba(16,185,129,0.5)] animate-pulse" />
System Operational
</div>
</div>
</div>
<motion.button
whileHover={{ scale: 1.02 }}
whileTap={{ scale: 0.98 }}
onClick={() => setIsAdding(true)}
className="flex items-center gap-2 px-6 py-3 bg-white text-slate-950 rounded-xl font-bold hover:bg-blue-50 transition-colors shadow-[0_0_20px_rgba(255,255,255,0.1)] hover:shadow-[0_0_25px_rgba(255,255,255,0.2)]"
>
<Plus size={18} strokeWidth={3} />
<span>Add Torrent</span>
</motion.button>
</header>
{/* Stats Dashboard */}
<div className="grid grid-cols-2 md:grid-cols-4 gap-4 mb-12">
{[
{ label: 'Network Activity', value: torrents.length + ' Sessions', icon: Activity, color: 'text-blue-400', bg: 'bg-blue-400/10' },
{ label: 'Swarm Peers', value: torrents.reduce((a, b) => a + b.peers, 0) + ' Connected', icon: Network, color: 'text-emerald-400', bg: 'bg-emerald-400/10' },
{ label: 'Global Progress', value: torrents.length ? (torrents.reduce((a, b) => a + b.progress, 0) / torrents.length).toFixed(0) + '%' : '0%', icon: Layers, color: 'text-indigo-400', bg: 'bg-indigo-400/10' },
{ label: 'Protocols', value: 'UDP / TCP', icon: Box, color: 'text-amber-400', bg: 'bg-amber-400/10' },
].map((stat, i) => (
<motion.div
key={i}
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
transition={{ delay: i * 0.1 }}
className="bg-slate-900/50 backdrop-blur-xl border border-white/5 p-5 rounded-3xl hover:border-white/10 transition-colors"
>
<div className="flex items-start justify-between mb-4">
<div className={`p-2.5 rounded-xl ${stat.bg}`}>
<stat.icon size={18} className={stat.color} />
</div>
<span className="text-[10px] uppercase font-bold text-slate-500 tracking-wider">0{i + 1}</span>
</div>
<p className="text-2xl font-bold text-white mb-1">{stat.value}</p>
<p className="text-[11px] font-medium text-slate-500 uppercase tracking-wide">{stat.label}</p>
</motion.div>
))}
</div>
{/* Torrents List */}
<div className="space-y-6">
<AnimatePresence mode="popLayout">
{torrents.length === 0 ? (
<motion.div
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
className="flex flex-col items-center justify-center py-40 border border-dashed border-slate-800/50 rounded-[2.5rem] bg-slate-900/20"
>
<div className="w-20 h-20 bg-slate-800/50 rounded-full flex items-center justify-center mb-6">
<DownloadCloud size={32} className="text-slate-600" />
</div>
<h3 className="text-xl font-bold text-slate-300 mb-2">No Active Swarms</h3>
<p className="text-slate-500 text-sm max-w-sm text-center leading-relaxed">
Initialize a new torrent session to begin the discovery and download process.
</p>
</motion.div>
) : (
torrents.map((torrent) => (
<motion.div
key={torrent.hash}
layout
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, scale: 0.95 }}
className="group bg-slate-900/40 backdrop-blur-md border border-white/5 rounded-[2rem] p-8 hover:border-blue-500/30 transition-all duration-500 shadow-xl shadow-black/20"
>
<div className="flex flex-col lg:flex-row gap-8 items-start lg:items-center">
{/* Main Info */}
<div className="flex-1 min-w-0">
<div className="flex items-center gap-3 mb-3">
<h3 className="text-xl font-bold text-white truncate pr-4">{torrent.name}</h3>
<span className={`px-3 py-1 rounded-full text-[10px] font-black uppercase tracking-wider border ${getStatusColor(torrent.status)}`}>
{torrent.status}
</span>
</div>
<div className="flex flex-wrap gap-6 text-xs text-slate-400 font-medium">
<div className="flex items-center gap-2 px-3 py-1.5 bg-slate-950/50 rounded-lg border border-white/5">
<Globe size={14} className="text-emerald-500" />
<span>{torrent.peers} peers linked</span>
</div>
<div className="flex items-center gap-2 px-3 py-1.5 bg-slate-950/50 rounded-lg border border-white/5">
<Network size={14} className="text-blue-500" />
<span>{torrent.activeConnections} active pipes</span>
</div>
<div className="flex items-center gap-2 px-3 py-1.5 bg-slate-950/50 rounded-lg border border-white/5 font-mono text-slate-500">
<span>HASH: {torrent.hash.slice(0, 12)}...</span>
</div>
</div>
</div>
{/* Progress Section */}
<div className="w-full lg:w-80">
<div className="flex justify-between text-[11px] font-bold text-slate-500 mb-2 uppercase tracking-wider">
<span>Completion</span>
<span className={torrent.status === 'completed' ? 'text-emerald-400' : 'text-blue-400'}>
{torrent.status === 'completed' ? 100 : torrent.progress}%
</span>
</div>
<div className="relative h-2.5 bg-slate-950 rounded-full overflow-hidden shadow-inner border border-white/5">
<motion.div
className={`absolute inset-y-0 left-0 ${torrent.status === 'completed' ? 'bg-gradient-to-r from-emerald-500 to-teal-400' : 'bg-gradient-to-r from-blue-600 to-indigo-500'}`}
initial={{ width: 0 }}
animate={{ width: `${torrent.status === 'completed' ? 100 : torrent.progress}%` }}
transition={{ type: 'spring', damping: 20 }}
/>
</div>
</div>
{/* Action Buttons */}
<div className="flex gap-3">
{torrent.status === 'ready' && (
<motion.button
whileHover={{ scale: 1.05 }}
whileTap={{ scale: 0.95 }}
onClick={() => startDownload(torrent.hash)}
className="w-12 h-12 rounded-2xl bg-blue-600 flex items-center justify-center text-white shadow-lg shadow-blue-600/20 hover:bg-blue-500 transition-colors"
>
<Play size={20} fill="currentColor" />
</motion.button>
)}
<motion.button
whileHover={{ scale: 1.05 }}
whileTap={{ scale: 0.95 }}
onClick={() => removeTorrent(torrent.hash)}
className="w-12 h-12 rounded-2xl bg-slate-800 flex items-center justify-center text-slate-400 hover:text-rose-400 hover:bg-slate-700 transition-colors"
>
<Trash2 size={20} />
</motion.button>
</div>
</div>
{/* File List */}
<AnimatePresence>
{torrent.files.length > 0 && (
<motion.div
initial={{ opacity: 0, height: 0 }}
animate={{ opacity: 1, height: 'auto' }}
className="mt-8 pt-8 border-t border-white/5 overflow-hidden"
>
<h4 className="text-[10px] font-bold text-slate-500 uppercase tracking-widest mb-4">Payload Contents</h4>
<div className="grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-3">
{torrent.files.map((file, i) => (
<div key={i} className="group/file flex items-center gap-4 bg-slate-950/30 hover:bg-slate-950/50 p-4 rounded-xl border border-white/5 hover:border-blue-500/20 transition-all duration-300">
<div className="p-2 bg-slate-800/50 rounded-lg text-slate-400 group-hover/file:text-blue-400 transition-colors">
<FileText size={16} />
</div>
<div className="min-w-0 flex-1">
<p className="text-sm font-semibold text-slate-300 truncate group-hover/file:text-white transition-colors">{file.name}</p>
<p className="text-[10px] font-medium text-slate-600">{(file.size / (1024 * 1024)).toFixed(2)} MB</p>
</div>
{torrent.status === 'completed' && (
<a
href={`/api/torrents/${torrent.hash}/download/${encodeURIComponent(file.name)}`}
target="_blank"
title="Download"
className="p-2 rounded-lg bg-blue-500/10 text-blue-400 hover:bg-blue-600 hover:text-white opacity-0 group-hover/file:opacity-100 transition-all transform scale-90 group-hover/file:scale-100"
>
<DownloadCloud size={16} />
</a>
)}
</div>
))}
</div>
</motion.div>
)}
</AnimatePresence>
</motion.div>
))
)}
</AnimatePresence>
</div>
{/* Modal */}
<AnimatePresence>
{isAdding && (
<div className="fixed inset-0 z-50 flex items-center justify-center p-6 bg-black/60 backdrop-blur-sm">
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
exit={{ opacity: 0 }}
onClick={() => setIsAdding(false)}
className="absolute inset-0"
/>
<motion.div
initial={{ opacity: 0, scale: 0.9, y: 20 }}
animate={{ opacity: 1, scale: 1, y: 0 }}
exit={{ opacity: 0, scale: 0.9, y: 20 }}
className="relative w-full max-w-2xl bg-[#0a0f1c] border border-white/10 rounded-[2rem] p-10 shadow-2xl shadow-black/50"
>
<h2 className="text-3xl font-black text-white mb-8 tracking-tight">Init Swarm Connection.</h2>
<div className="space-y-8">
<div>
<label className="text-xs font-bold text-slate-500 uppercase tracking-widest block mb-3">Target Magnet URI</label>
<div className="relative group">
<input
autoFocus
type="text"
value={magnetInput}
onChange={(e) => setMagnetInput(e.target.value)}
placeholder="magnet:?xt=urn:btih:..."
className="w-full bg-slate-950 border border-slate-800 rounded-2xl px-6 py-5 pr-14 text-slate-200 placeholder:text-slate-700 focus:outline-none focus:border-blue-500/50 focus:ring-4 focus:ring-blue-500/10 transition-all font-mono text-sm"
/>
<div className="absolute right-5 top-1/2 -translate-y-1/2 text-slate-700 group-focus-within:text-blue-500 transition-colors">
<Magnet size={24} />
</div>
</div>
{error && (
<motion.div
initial={{ opacity: 0, y: -5 }}
animate={{ opacity: 1, y: 0 }}
className="mt-3 flex items-center gap-2 text-rose-400 text-xs font-bold bg-rose-950/30 px-3 py-2 rounded-lg inline-block border border-rose-500/20"
>
<Info size={12} /> {error}
</motion.div>
)}
</div>
<div className="flex gap-4 pt-2">
<button
onClick={() => setIsAdding(false)}
className="flex-1 py-4 rounded-xl font-bold bg-slate-900 border border-white/5 text-slate-400 hover:bg-slate-800 hover:text-white transition-colors"
>
Cancel Protocol
</button>
<button
onClick={handleAddTorrent}
className="flex-[2] py-4 rounded-xl font-bold bg-gradient-to-r from-blue-600 to-indigo-600 text-white hover:shadow-lg hover:shadow-blue-600/20 hover:scale-[1.02] transition-all flex items-center justify-center gap-3"
>
<span>Establish Uplink</span>
<ArrowRight size={20} strokeWidth={2.5} />
</button>
</div>
</div>
</motion.div>
</div>
)}
</AnimatePresence>
</div>
)
}
export default App

View File

@@ -0,0 +1,53 @@
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700;800;900&display=swap');
@tailwind base;
@tailwind components;
@tailwind utilities;
/* Global theme: dark base, Inter type, radial glow backdrop. */
:root {
  font-family: 'Inter', system-ui, -apple-system, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  color-scheme: dark; /* native widgets (inputs, scrollbars) render dark */
  color: rgba(255, 255, 255, 0.92);
  background-color: #030712; /* Slate 950 base */
  font-synthesis: none; /* don't synthesize missing bold/italic faces */
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}
body {
  margin: 0;
  display: flex;
  place-items: start;
  justify-content: center; /* horizontally centers the app's max-w container */
  min-width: 320px;
  min-height: 100vh;
  /* Blue + emerald glows anchored near the top of the viewport. */
  background-image:
    radial-gradient(circle at 50% 0%, rgba(59, 130, 246, 0.15) 0%, transparent 50%),
    radial-gradient(circle at 80% 10%, rgba(16, 185, 129, 0.1) 0%, transparent 40%);
  background-attachment: fixed; /* glow stays put while content scrolls */
}
/* Custom Scrollbar (WebKit/Blink engines only) */
::-webkit-scrollbar {
  width: 8px;
  height: 8px;
}
::-webkit-scrollbar-track {
  background: transparent;
}
::-webkit-scrollbar-thumb {
  background: #1e293b; /* slate-800 */
  border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
  background: #334155; /* slate-700 */
}

View File

@@ -0,0 +1,10 @@
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App'
import './index.css'
// Mount the React tree at #root. StrictMode double-invokes renders/effects
// in development to surface impurities; it is a no-op in production builds.
// The non-null assertion is safe because index.html always ships <div id="root">.
ReactDOM.createRoot(document.getElementById('root')!).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
)

View File

@@ -0,0 +1,11 @@
/** @type {import('tailwindcss').Config} */
export default {
  // Files scanned for class names; classes not found in these globs are
  // purged from the production CSS bundle.
  content: [
    "./index.html",
    "./src/**/*.{js,ts,jsx,tsx}",
  ],
  theme: {
    extend: {}, // stock Tailwind palette/spacing; nothing customized yet
  },
  plugins: [],
}

View File

@@ -0,0 +1,31 @@
{
"compilerOptions": {
"target": "ESNext",
"useDefineForClassFields": true,
"lib": [
"DOM",
"DOM.Iterable",
"ESNext"
],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": [
"src"
],
"references": [
{
"path": "./tsconfig.node.json"
}
]
}

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"composite": true,
"skipLibCheck": true,
"module": "ESNext",
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true
},
"include": [
"vite.config.ts"
]
}

View File

@@ -0,0 +1,7 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// https://vitejs.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    // The client calls relative `/api/*` URLs (see src/App.tsx); without a
    // proxy the Vite dev server would answer those requests itself with 404s.
    // Forward them to the backend, which listens on :3001 by default
    // (packages/server/src/index.ts).
    proxy: {
      '/api': 'http://localhost:3001',
    },
  },
})

View File

@@ -0,0 +1,25 @@
{
"name": "@torrent-client/server",
"version": "1.0.0",
"private": true,
"type": "module",
"scripts": {
"start": "bun src/index.ts",
"dev": "bun --watch src/index.ts",
"build": "tsc",
"test": "bun test"
},
"dependencies": {
"@torrent-client/shared": "*",
"express": "^4.18.2",
"cors": "^2.8.5"
},
"devDependencies": {
"@types/express": "^4.17.17",
"@types/cors": "^2.8.13",
"@types/node": "^18.17.0",
"bun-types": "latest",
"ts-node": "^10.9.1",
"nodemon": "^3.0.1"
}
}

View File

@@ -0,0 +1,107 @@
import { expect, test, describe } from "bun:test";
import { TorrentSession } from "./engine";
import { Bitfield } from "@torrent-client/shared";
import crypto from 'node:crypto';
import { rmSync, existsSync } from 'node:fs';
import { join } from 'node:path';
describe("Download Integration", () => {
  // Any syntactically valid 40-hex-char value works here: the mock peer
  // below simply echoes it back during the handshake.
  const MOCK_HASH = "A18230D43BDA105BE7DEF84CB711859018AAA92C"; // Dummy hash
  const MAGNET = `magnet:?xt=urn:btih:${MOCK_HASH}&dn=Test`;
  // End-to-end over a real loopback TCP socket:
  // handshake -> bitfield -> unchoke -> request -> piece -> SHA-1 verify -> 'completed'.
  test("Full Handshake and Piece Request Flow", async () => {
    // 1. Setup Mock Peer Server: a minimal seeder that claims every piece
    //    and serves zero-filled 16 KiB blocks for any request.
    //    NOTE(review): binds the fixed port 6881 — collides with any other
    //    BitTorrent client or parallel test run; an ephemeral port would be
    //    safer if the injected peer address below were derived from it.
    let receivedHandshake = false;
    let receivedRequest = false;
    const server = Bun.listen({
      hostname: "127.0.0.1",
      port: 6881,
      socket: {
        data(s, data) {
          // NOTE(review): `buf` is rebuilt on every data event, so a message
          // fragmented across TCP reads would be dropped. Tolerable on
          // loopback with tiny messages, but worth confirming.
          let buf = data;
          if (!receivedHandshake && buf.length >= 68 && buf[0] === 19) {
            receivedHandshake = true;
            // Send Handshake back: pstrlen(19) + pstr + reserved + info-hash + peer id
            const reserved = Buffer.alloc(8);
            const peerId = Buffer.alloc(20);
            Buffer.from("-BT0001-TESTPEER").copy(peerId);
            s.write(Buffer.concat([
              Buffer.from([19]), Buffer.from("BitTorrent protocol"),
              reserved, Buffer.from(MOCK_HASH, 'hex'), peerId
            ]));
            // Send Bitfield (we have all pieces)
            const bitfield = Buffer.from([255]);
            const len = Buffer.alloc(4);
            len.writeUInt32BE(bitfield.length + 1);
            s.write(Buffer.concat([len, Buffer.from([5]), bitfield]));
            // Unchoke (len=1, id=1) so the client may start requesting
            s.write(Buffer.from([0, 0, 0, 1, 1]));
            buf = buf.slice(68);
          }
          // Length-prefixed message loop: [len:4][id:1][payload]
          while (buf.length >= 4) {
            const msgLen = buf.readUInt32BE(0);
            if (msgLen === 0) { buf = buf.slice(4); continue; } // keep-alive
            if (buf.length < msgLen + 4) break; // incomplete message in this read
            const id = buf[4];
            if (id === 6) { // REQUEST
              receivedRequest = true;
              // Send dummy piece back: PIECE = [index:4][begin:4][16 KiB zeros]
              const index = buf.readUInt32BE(5);
              const block = Buffer.alloc(16384);
              const payload = Buffer.alloc(16384 + 8);
              payload.writeUInt32BE(index, 0);
              payload.writeUInt32BE(0, 4);
              block.copy(payload, 8);
              const resLen = Buffer.alloc(4);
              resLen.writeUInt32BE(payload.length + 1);
              s.write(Buffer.concat([resLen, Buffer.from([7]), payload]));
            }
            buf = buf.slice(msgLen + 4);
          }
        }
      }
    });
    // 2. Initialize Session, then bypass network discovery by injecting a
    //    single-piece, single-file metadata set through the private fields.
    const session = new TorrentSession(MAGNET);
    // Mock metadata discovery for test
    (session as any).solvedMetadata = true;
    (session as any).files = [{ name: "test.dat", size: 16384 }];
    (session as any).pieceLength = 16384;
    (session as any).totalSize = 16384; // Added this field
    (session as any).bitfield = new Bitfield(1);
    // SHA-1 of 16 KiB of zeros — matches exactly what the mock peer serves.
    (session as any).pieceHashes = crypto.createHash('sha1').update(Buffer.alloc(16384)).digest();
    (session as any).status = 'ready';
    (session as any).peers = ["127.0.0.1:6881"];
    // 3. Start Download
    session.startDownload();
    // 4. Wait for completion or timeout (20 × 200 ms = 4 s budget)
    let attempts = 0;
    while (session.status !== 'completed' && attempts < 20) {
      await new Promise(r => setTimeout(r, 200));
      attempts++;
    }
    // 5. Assertions
    expect(receivedHandshake).toBe(true);
    expect(receivedRequest).toBe(true);
    expect(session.status).toBe('completed');
    expect(session.progress).toBe(100);
    server.stop();
    session.destroy();
    // Cleanup downloaded data, if any was written.
    // NOTE(review): `storage` is never injected above, so writePiece is a
    // no-op via optional chaining — this guard exists in case that changes.
    const downloadPath = join(process.cwd(), 'downloads', MOCK_HASH);
    if (existsSync(downloadPath)) {
      rmSync(downloadPath, { recursive: true, force: true });
    }
  }, 10000);
});

View File

@@ -0,0 +1,42 @@
import { expect, test, describe } from "bun:test";
import { TorrentSession } from "./engine";
describe("Real Swarm Download", () => {
  // Debian 12.8.0 netinst magnet (highly seeded)
  const REAL_MAGNET = "magnet:?xt=urn:btih:6a9759bffd5c0af65319979fb7832189f4f3c35d&dn=debian-12.8.0-amd64-netinst.iso";
  // Live-network smoke test: requires internet access and a healthy swarm.
  // NOTE(review): inherently flaky offline or in CI — consider gating it
  // behind an env flag so a default `bun test` run stays deterministic.
  test("Discover and download first block from real swarm", async () => {
    const session = new TorrentSession(REAL_MAGNET);
    console.log("[Test] Waiting for metadata discovery...");
    // Poll up to 60 s for metadata (the engine's own discovery timeout is 30 s,
    // so a failure would flip status to 'error' well before this loop ends).
    let attempts = 0;
    while (session.status === 'discovering' && attempts < 60) {
      await new Promise(r => setTimeout(r, 1000));
      attempts++;
    }
    expect(session.status).toBe('ready');
    console.log(`[Test] Metadata discovered! ${session.files[0].name} (${session.files[0].size} bytes)`);
    console.log("[Test] Starting download...");
    session.startDownload();
    // Wait for at least 1% progress or 1 active connection
    let downloadAttempts = 0;
    let foundPeers = false;
    while (session.progress < 1 && downloadAttempts < 60) {
      await new Promise(r => setTimeout(r, 1000));
      if (session.activeConnections > 0) foundPeers = true;
      if (session.progress > 0) break;
      downloadAttempts++;
    }
    console.log(`[Test] Final Status: ${session.status}, Progress: ${session.progress}%, Active: ${session.activeConnections}`);
    expect(foundPeers).toBe(true);
    // We don't strictly expect progress > 0 because peers might be slow/choked,
    // but we at least want to see active connections and INTERESTED signals in logs.
    session.destroy();
  }, 125000); // 2 minute timeout for real network
});

View File

@@ -0,0 +1,296 @@
import { parseMagnetURI, type MagnetData, Bitfield } from '@torrent-client/shared';
import { getPeersFromUDPTracker, fetchMetadataFromPeer, PUBLIC_TRACKERS } from './index';
import { StorageManager } from './storage';
import { PeerWorker } from './worker';
import { PieceReassembler } from './reassembler';
import crypto from 'node:crypto';
// Session lifecycle states; see TorrentSession for the transitions
// ('discovering' -> 'ready' -> 'downloading' -> 'completed' | 'error' | 'paused').
export type SessionStatus = 'discovering' | 'ready' | 'downloading' | 'paused' | 'completed' | 'error';
// One file inside the torrent payload; size is in bytes.
export interface TorrentFile {
  name: string;
  size: number;
}
/**
 * One torrent download session.
 *
 * Flow: the constructor kicks off tracker discovery; the first peer that
 * serves valid metadata (via fetchMetadataFromPeer) moves the session to
 * 'ready'; startDownload() then spins up PeerWorkers that request 16 KiB
 * blocks, which are reassembled into pieces, SHA-1-verified against the
 * metadata hashes, and handed to the StorageManager. toJSON() is the
 * snapshot served to the web client.
 */
export class TorrentSession {
  public hash: string;                       // 40-char hex info-hash
  public magnetData: MagnetData;
  public status: SessionStatus = 'discovering';
  public files: TorrentFile[] = [];
  public peers: string[] = [];               // "host:port" strings from trackers
  public activeConnections: number = 0;
  public progress: number = 0;               // integer percent, 0-100
  public errorMessage: string | null = null;
  private peerSet = new Set<string>();       // dedupes peers across trackers
  private solvedMetadata = false;            // true once any peer delivered metadata
  private isDestroyed = false;
  private storage?: StorageManager;          // created once metadata is known
  private bitfield?: Bitfield;               // pieces verified locally
  private pieceHashes?: Uint8Array;          // concatenated 20-byte SHA-1 digests
  private pieceLength: number = 0;
  private totalSize: number = 0;
  private workers: PeerWorker[] = [];        // live download connections
  private reassemblers: Map<number, PieceReassembler> = new Map(); // pieceIdx -> in-flight piece
  private peerIndex = 0;                     // next this.peers entry to hand to a worker
  private MAX_WORKERS = 30;
  constructor(magnetURI: string) {
    const parsed = parseMagnetURI(magnetURI);
    if (!parsed?.hash) throw new Error("Invalid magnet link");
    this.hash = parsed.hash;
    this.magnetData = parsed;
    // Discovery starts immediately; callers poll `status` for progress.
    this.startDiscovery();
  }
  // Query every udp:// tracker (magnet's own tr= entries + public list) in
  // parallel; each newly seen peer is probed for metadata (first 50 only).
  private async startDiscovery() {
    const trackers = [...new Set([...(this.magnetData.tr || []), ...PUBLIC_TRACKERS])].filter(t => t.startsWith("udp:"));
    console.log(`[Engine] Session ${this.hash}: Starting discovery from ${trackers.length} trackers...`);
    trackers.forEach(async (tracker) => {
      try {
        const trackerPeers = await getPeersFromUDPTracker(tracker, this.hash);
        for (const peer of trackerPeers) {
          if (this.isDestroyed || this.solvedMetadata || this.peerSet.has(peer)) continue;
          this.peerSet.add(peer);
          this.peers = Array.from(this.peerSet);
          // Cap concurrent metadata probes to the first 50 unique peers.
          if (this.peerSet.size <= 50) {
            this.tryFetchMetadata(peer);
          }
          // If we are already downloading, immediately try to use new peers
          // NOTE(review): 'downloading' implies solvedMetadata === true, and
          // the `continue` above skips every peer once solvedMetadata is set
          // — so this branch looks unreachable and late tracker responses
          // never grow the swarm. Confirm and reorder the guard if intended.
          if (this.status === 'downloading') {
            this.refillWorkers();
          }
        }
      } catch (e) {
        // Tracker error — ignored; other trackers may still respond.
      }
    });
    // Give metadata discovery 30 s before declaring the session failed.
    setTimeout(() => {
      if (!this.isDestroyed && this.status === 'discovering' && !this.solvedMetadata) {
        this.status = 'error';
        this.errorMessage = "Metadata discovery timed out.";
      }
    }, 30000);
  }
  // Probe one peer for the torrent metadata; the first successful result
  // wins (guarded by solvedMetadata) and flips the session to 'ready'.
  private async tryFetchMetadata(peer: string) {
    if (this.isDestroyed || this.solvedMetadata) return;
    // NOTE(review): during discovery this counter tracks metadata probes,
    // but during download it is overwritten with workers.length (see
    // onWorkerReady/onWorkerClose) — two different meanings share one field.
    this.activeConnections++;
    const [host, p] = peer.split(':');
    try {
      const res = await fetchMetadataFromPeer(this.hash, host, parseInt(p));
      if (res && !this.solvedMetadata && !this.isDestroyed) {
        this.solvedMetadata = true;
        this.files = res.files || [{ name: res.name, size: res.size }];
        this.pieceHashes = res.pieces;
        this.pieceLength = res.pieceLength;
        this.totalSize = res.size;
        this.status = 'ready';
        const totalPieces = Math.ceil(res.size / res.pieceLength);
        this.bitfield = new Bitfield(totalPieces);
        this.storage = new StorageManager(this.hash, this.files);
        console.log(`[Engine] Session ${this.hash}: Metadata verified. ${totalPieces} pieces total.`);
      }
    } catch (e) {
      // Peer failed — expected for much of the swarm; others may succeed.
    } finally {
      this.activeConnections--;
    }
  }
  /** Begin downloading; only valid from the 'ready' state (no-op otherwise). */
  public startDownload() {
    if (this.status !== 'ready') return;
    this.status = 'downloading';
    console.log(`[Engine] Session ${this.hash}: Initializing swarm download...`);
    this.refillWorkers();
  }
  // Top up the worker pool to MAX_WORKERS, consuming peers in discovery
  // order. Each peer is tried at most once (peerIndex never rewinds).
  private refillWorkers() {
    if (this.status !== 'downloading' || this.isDestroyed) return;
    while (this.workers.length < this.MAX_WORKERS && this.peerIndex < this.peers.length) {
      const peer = this.peers[this.peerIndex++];
      const [host, p] = peer.split(':');
      const worker = new PeerWorker(
        this.hash, host, parseInt(p),
        this.bitfield!.totalPieces,
        (idx, begin, block) => this.handleBlock(idx, begin, block),
        () => this.onWorkerReady(worker),
        () => this.onWorkerClose(worker)
      );
      this.workers.push(worker);
      worker.connect();
    }
  }
  // A worker finished its handshake and is usable: declare interest and
  // hand it block requests.
  private onWorkerReady(worker: PeerWorker) {
    if (this.status !== 'downloading') return;
    worker.signalInterest(); // Proactively tell them we want data!
    this.activeConnections = this.workers.length;
    this.scheduleWork();
  }
  // A worker's connection dropped: forget it and try to replace it.
  private onWorkerClose(worker: PeerWorker) {
    this.workers = this.workers.filter(w => w !== worker);
    this.activeConnections = this.workers.length;
    this.refillWorkers(); // Try to get a new peer
  }
  // Assign block requests to workers. Pipeline depth is capped at 5
  // outstanding requests per worker; `allPending` prevents two workers
  // from requesting the same (piece, offset) block simultaneously.
  private scheduleWork() {
    if (this.status !== 'downloading' || !this.bitfield) return;
    const allPending = new Set<string>();
    for (const w of this.workers) {
      for (const p of w.getPendingRequests()) allPending.add(p);
    }
    for (const worker of this.workers) {
      if (worker.getPendingRequests().size >= 5) continue;
      // 1. Prioritize ongoing piece reassembly — finish started pieces first.
      let foundWork = false;
      for (const [pieceIdx, reassembler] of this.reassemblers.entries()) {
        if (worker.hasPiece(pieceIdx)) {
          const missing = reassembler.getMissingBlocks();
          for (const begin of missing) {
            const key = `${pieceIdx}:${begin}`;
            if (!allPending.has(key) && worker.getPendingRequests().size < 5) {
              const blockSize = Math.min(16384, reassembler.totalSize - begin);
              worker.requestBlock(pieceIdx, begin, blockSize);
              allPending.add(key);
              foundWork = true;
            }
          }
        }
      }
      if (foundWork) continue;
      // 2. Start a new piece: first unfinished piece this worker advertises.
      // NOTE(review): linear scan over all pieces per worker per call —
      // O(pieces × workers); fine for small torrents, costly on large ones.
      for (let i = 0; i < this.bitfield.totalPieces; i++) {
        if (!this.bitfield.has(i) && !this.reassemblers.has(i) && worker.hasPiece(i)) {
          // Last piece may be shorter; `|| pieceLength` covers the case
          // where totalSize is an exact multiple of pieceLength.
          const size = (i === this.bitfield.totalPieces - 1)
            ? (this.totalSize % this.pieceLength || this.pieceLength)
            : this.pieceLength;
          const re = new PieceReassembler(size);
          this.reassemblers.set(i, re);
          const missing = re.getMissingBlocks();
          for (const begin of missing) {
            const key = `${i}:${begin}`;
            if (!allPending.has(key) && worker.getPendingRequests().size < 5) {
              const blockSize = Math.min(16384, size - begin);
              worker.requestBlock(i, begin, blockSize);
              allPending.add(key);
            }
          }
          break;
        }
      }
    }
  }
  // Receive one block from a worker. When a piece completes, verify its
  // SHA-1 against the metadata before persisting and counting it.
  private handleBlock(index: number, begin: number, block: Buffer) {
    if (!this.bitfield || this.bitfield.has(index)) return; // duplicate/late block
    const reassembler = this.reassemblers.get(index);
    if (!reassembler) return; // piece no longer in flight
    if (reassembler.addBlock(begin, block)) {
      // Piece is complete!
      const fullPiece = reassembler.getFullPiece();
      if (!fullPiece) return;
      // Metadata stores 20-byte SHA-1 digests back to back.
      const expectedHash = this.pieceHashes?.slice(index * 20, (index + 1) * 20);
      const actualHash = crypto.createHash('sha1').update(fullPiece).digest();
      if (expectedHash && Buffer.compare(actualHash, expectedHash) === 0) {
        this.bitfield.set(index);
        this.storage?.writePiece(index, this.pieceLength, fullPiece);
        this.reassemblers.delete(index);
        // Recount verified pieces to derive the progress percentage.
        const completed = Array.from({ length: this.bitfield.totalPieces }).filter((_, i) => this.bitfield!.has(i)).length;
        this.progress = Math.floor((completed / this.bitfield.totalPieces) * 100);
        if (this.progress >= 100) {
          this.status = 'completed';
          console.log(`[Engine] Torrent ${this.hash} download COMPLETED!`);
        }
      } else {
        // Hash failed, restart piece (drop all blocks; it will be re-requested)
        this.reassemblers.delete(index);
      }
    }
    this.scheduleWork();
  }
  /**
   * Stop the session: flags it destroyed so in-flight async callbacks
   * become no-ops and suppresses further metadata handling.
   * NOTE(review): worker sockets are never actually closed (placeholder
   * below), and status ends as 'paused' rather than a terminal state —
   * confirm whether PeerWorker exposes a close() to call here.
   */
  public destroy() {
    this.isDestroyed = true;
    this.solvedMetadata = true;
    this.status = 'paused';
    this.workers.forEach(w => { /* close socket if possible */ });
    console.log(`[Engine] Session ${this.hash}: Destroyed.`);
  }
  /** Plain-object snapshot served to the web client (GET /api/torrents). */
  public toJSON() {
    return {
      hash: this.hash,
      name: this.magnetData.dn || this.files[0]?.name || this.hash,
      status: this.status,
      progress: this.progress,
      peers: this.peers.length,
      activeConnections: this.workers.length,
      files: this.files,
      errorMessage: this.errorMessage
    };
  }
}
/**
 * Process-wide registry of torrent sessions, keyed by info-hash.
 * Singleton: obtain the shared instance via TorrentManager.getInstance().
 */
export class TorrentManager {
  private static instance: TorrentManager;
  private sessions = new Map<string, TorrentSession>();
  private constructor() {}

  /** Lazily create and return the single shared instance. */
  public static getInstance(): TorrentManager {
    TorrentManager.instance ??= new TorrentManager();
    return TorrentManager.instance;
  }

  /**
   * Register a session for the given magnet link. Idempotent: when a
   * session for the same info-hash already exists it is returned unchanged.
   * @throws Error when the magnet URI cannot be parsed.
   */
  public addTorrent(magnetURI: string): TorrentSession {
    const parsed = parseMagnetURI(magnetURI);
    if (!parsed?.hash) throw new Error("Invalid magnet");
    const existing = this.sessions.get(parsed.hash);
    if (existing) return existing;
    const created = new TorrentSession(magnetURI);
    this.sessions.set(created.hash, created);
    return created;
  }

  /** Look up a session by info-hash; undefined when unknown. */
  public getSession(hash: string): TorrentSession | undefined {
    return this.sessions.get(hash);
  }

  /** Snapshot array of every registered session. */
  public getAllSessions(): TorrentSession[] {
    return [...this.sessions.values()];
  }

  /** Tear down and forget the session for this hash (no-op when absent). */
  public removeSession(hash: string) {
    this.sessions.get(hash)?.destroy();
    this.sessions.delete(hash);
  }
}

View File

@@ -0,0 +1,317 @@
import { parseMagnetURI, BencodeDecoder, BencodeEncoder, MetadataReassembler } from "@torrent-client/shared";
import dgram from "node:dgram";
import crypto from "node:crypto";
import { join } from "node:path";
import { TorrentManager } from "./engine";
const port = process.env.PORT || 3001;
// Timeouts (ms), read lazily from the environment on every call so tests
// can override TRACKER_TIMEOUT / METADATA_TIMEOUT at runtime. Empty-string
// values intentionally fall back to the defaults (|| semantics).
const getCONFIG = () => {
  const envInt = (value: string | undefined, fallback: string): number =>
    parseInt(value || fallback);
  return {
    TRACKER_TIMEOUT: envInt(process.env.TRACKER_TIMEOUT, "3000"),
    METADATA_TIMEOUT: envInt(process.env.METADATA_TIMEOUT, "10000"),
  };
};
// Well-known open UDP trackers, merged with any tr= entries from the magnet
// link during discovery (see engine.ts). Only udp:// trackers are supported
// by getPeersFromUDPTracker below, so http(s) trackers are filtered out.
export const PUBLIC_TRACKERS = [
  "udp://tracker.opentrackr.org:1337/announce",
  "udp://tracker.openbittorrent.com:6969/announce",
  "udp://exodus.desync.com:6969/announce",
  "udp://open.stealth.si:80/announce",
  "udp://tracker.torrent.eu.org:451/announce"
];
// NOTE(review): this logs a URL but no HTTP server is visible in this chunk
// — presumably Bun.serve / express wiring happens further down; confirm.
console.log(`Torrent Engine (Bun) running at http://localhost:${port}`);
/**
 * Ask a UDP tracker (BEP 15) for peers of the given info-hash.
 *
 * Performs the two-step connect/announce handshake and parses the compact
 * IPv4 peer list ("a.b.c.d:port" strings) from the announce response.
 * Resolves to [] on timeout, socket error, malformed or error responses,
 * or non-udp URLs — it never rejects, so callers can fan out over many
 * trackers without individual error handling.
 *
 * @param trackerUrl  e.g. "udp://tracker.opentrackr.org:1337/announce"
 * @param infoHashHex 40-character hex info-hash
 */
export async function getPeersFromUDPTracker(trackerUrl: string, infoHashHex: string): Promise<string[]> {
  const url = new URL(trackerUrl);
  if (url.protocol !== "udp:") return [];
  const trackerPort = parseInt(url.port);
  if (!Number.isFinite(trackerPort)) return []; // udp tracker URLs must carry an explicit port
  return new Promise((resolve) => {
    const client = dgram.createSocket("udp4");
    const transactionId = crypto.randomBytes(4);
    // Magic constant mandated by BEP 15 for the initial connect request.
    const connectionId = Buffer.from("0000041727101980", "hex");
    const connectMsg = Buffer.concat([connectionId, Buffer.from([0, 0, 0, 0]), transactionId]);
    // Single settle point: guards against double resolve/close races
    // between the timeout, error events and late datagrams.
    let settled = false;
    const finish = (peers: string[]) => {
      if (settled) return;
      settled = true;
      clearTimeout(timeout);
      try { client.close(); } catch { /* already closed */ }
      resolve(peers);
    };
    const timeout = setTimeout(() => finish([]), getCONFIG().TRACKER_TIMEOUT);
    // Without this handler a socket error (DNS failure, ICMP port
    // unreachable, ...) would crash the process instead of skipping
    // the tracker: dgram sockets throw on unhandled 'error' events.
    client.on("error", () => finish([]));
    client.on("message", (msg) => {
      if (msg.length < 8) return; // too short for action + transaction id
      const action = msg.readInt32BE(0);
      if (!transactionId.equals(msg.slice(4, 8))) return; // not our exchange
      if (action === 0 && msg.length >= 16) {
        // Connect response: echo the server-issued connection id in announce.
        const receivedConnectionId = msg.slice(8, 16);
        const announceMsg = Buffer.concat([
          receivedConnectionId, Buffer.from([0, 0, 0, 1]), transactionId,
          Buffer.from(infoHashHex, "hex"), Buffer.from("-BT0001-" + crypto.randomBytes(6).toString("hex")),
          Buffer.alloc(8), Buffer.alloc(8), Buffer.alloc(8),        // downloaded / left / uploaded
          Buffer.from([0, 0, 0, 2]), Buffer.alloc(4), Buffer.alloc(4), // event=started, IP, key
          Buffer.from([0xFF, 0xFF, 0xFF, 0xFF]), Buffer.from([0x1B, 0x39]) // num_want=-1, port=6969
        ]);
        client.send(announceMsg, trackerPort, url.hostname);
      } else if (action === 1) {
        // Announce response: 20-byte header, then 6-byte compact peer entries.
        const peers: string[] = [];
        for (let i = 20; i + 6 <= msg.length; i += 6) {
          peers.push(`${msg[i]}.${msg[i + 1]}.${msg[i + 2]}.${msg[i + 3]}:${msg.readUInt16BE(i + 4)}`);
        }
        finish(peers);
      } else if (action === 3) {
        // Tracker-reported error reply; treat as "no peers from this tracker".
        finish([]);
      }
    });
    client.send(connectMsg, trackerPort, url.hostname, (err) => {
      if (err) finish([]);
    });
  });
}
/**
 * Full BEP 9/10 Metadata Retrieval
 *
 * Connects to a single peer over TCP, performs the BitTorrent handshake with
 * the extension-protocol bit set (BEP 10), negotiates ut_metadata via the
 * extended handshake, then requests metadata pieces until the info dictionary
 * is complete (BEP 9). Resolves with the parsed metadata, or null on
 * timeout / connection failure; it never rejects.
 */
export async function fetchMetadataFromPeer(hash: string, host: string, portNum: number): Promise<{ name: string; size: number; pieces: Uint8Array; pieceLength: number; files?: any[] } | null> {
  return new Promise((resolve) => {
    let receivedHandshake = false;
    let utMetadataId: number | null = null;   // extension id the peer assigned to ut_metadata
    let metadataSize: number | null = null;   // total info-dict size advertised by the peer
    let reassembler: MetadataReassembler | null = null;
    let buffer = Buffer.alloc(0);             // accumulates partial TCP reads
    let socketInstance: any = null;
    let isSettled = false;                    // guards against double resolution
    const timer = setTimeout(() => {
      if (isSettled) return;
      if (socketInstance) {
        console.log(`[Metadata] Timeout reaching ${host}:${portNum}`);
        socketInstance.end();
      } else {
        console.log(`[Metadata] Connection attempt to ${host}:${portNum} timed out`);
      }
      isSettled = true;
      resolve(null);
    }, getCONFIG().METADATA_TIMEOUT);
    // Resolves exactly once and cancels the timeout.
    const finish = (result: any) => {
      if (isSettled) return;
      isSettled = true;
      clearTimeout(timer);
      resolve(result);
    };
    try {
      const socket = Bun.connect({
        hostname: host,
        port: portNum,
        socket: {
          open(s: any) {
            socketInstance = s;
            console.log(`[Metadata] Socket open to ${host}:${portNum}`);
            // 68-byte BitTorrent handshake (BEP 3) with the BEP 10 extension
            // bit set in the reserved bytes.
            const reserved = Buffer.alloc(8);
            reserved[5] |= 0x10; // Extension protocol
            s.write(Buffer.concat([
              Buffer.from([19]), Buffer.from("BitTorrent protocol"),
              reserved, Buffer.from(hash, 'hex'), Buffer.from("-BT0001-" + crypto.randomBytes(6).toString("hex"))
            ]));
          },
          data(s: any, data: Buffer) {
            buffer = Buffer.concat([buffer, data]);
            if (!receivedHandshake) {
              // Wait for the peer's full 68-byte handshake before parsing
              // length-prefixed messages.
              if (buffer.length >= 68 && buffer[0] === 19) {
                receivedHandshake = true;
                console.log(`[Metadata] Handshake with ${host} OK`);
                buffer = buffer.slice(68);
                // Signal interested + Extended Handshake
                s.write(Buffer.from([0, 0, 0, 1, 2])); // interested
                // Advertise our local ut_metadata extension id (1) per BEP 10.
                const extHandshake = BencodeEncoder.encode({ m: { ut_metadata: 1 } });
                const msgLen = Buffer.alloc(4);
                msgLen.writeUInt32BE(extHandshake.length + 2);
                s.write(Buffer.concat([msgLen, Buffer.from([20, 0]), extHandshake]));
              } else return;
            }
            // Drain every complete length-prefixed message in the buffer.
            while (buffer.length >= 4) {
              const length = buffer.readUInt32BE(0);
              if (length === 0) {
                // keep-alive
                buffer = buffer.slice(4);
                continue;
              }
              if (buffer.length < length + 4) break;
              const msg = buffer.slice(4, length + 4);
              buffer = buffer.slice(length + 4);
              if (msg[0] === 20) { // extended message (BEP 10)
                const extId = msg[1];
                const payload = msg.slice(2);
                if (extId === 0) {
                  // Extended handshake: learn the peer's ut_metadata id and
                  // total metadata size, then request piece 0.
                  try {
                    const decoded = BencodeDecoder.decode(payload) as any;
                    utMetadataId = decoded.m?.ut_metadata;
                    metadataSize = decoded.metadata_size;
                    console.log(`[Metadata] ${host} metadata_size: ${metadataSize}, ut_metadata: ${utMetadataId}`);
                    if (utMetadataId !== undefined && utMetadataId !== null && metadataSize) {
                      reassembler = new MetadataReassembler(metadataSize);
                      console.log(`[Metadata] Requesting piece 0 from ${host}`);
                      const req = BencodeEncoder.encode({ msg_type: 0, piece: 0 });
                      const len = Buffer.alloc(4);
                      len.writeUInt32BE(req.length + 2);
                      s.write(Buffer.concat([len, Buffer.from([20, utMetadataId]), req]));
                    }
                  } catch (e) {
                    console.error(`[Metadata] Bencode error from ${host}`);
                  }
                } else if (extId === 1 && reassembler) { // Message for OUR ut_metadata assignment
                  // ut_metadata message: bencoded header immediately followed
                  // by raw piece bytes.
                  try {
                    const decoder = new BencodeDecoder(payload);
                    const dict = decoder.decode() as any;
                    const pieceData = payload.slice(decoder.getOffset());
                    console.log(`[Metadata] Received piece ${dict.piece} from ${host} (${pieceData.length} bytes)`);
                    if (dict.msg_type === 1) {
                      const complete = reassembler.addPiece(dict.piece, pieceData);
                      if (complete) {
                        console.log(`[Metadata] Fetched all info from ${host}!`);
                        const fullMetadata = reassembler.getFullMetadata();
                        if (fullMetadata) {
                          const info = BencodeDecoder.decode(fullMetadata) as any;
                          // Normalize single-file and multi-file layouts into
                          // one files[] shape; path parts may be raw bytes.
                          const files = info.files
                            ? info.files.map((f: any) => ({
                                name: Array.isArray(f.path)
                                  ? f.path.map((p: any) => p instanceof Uint8Array ? new TextDecoder().decode(p) : p).join('/')
                                  : (f.path instanceof Uint8Array ? new TextDecoder().decode(f.path) : f.path),
                                size: f.length
                              }))
                            : [{
                                name: info.name instanceof Uint8Array ? new TextDecoder().decode(info.name) : info.name,
                                size: info.length
                              }];
                          finish({
                            name: info.name instanceof Uint8Array ? new TextDecoder().decode(info.name) : info.name,
                            size: info.length || files.reduce((acc: number, f: any) => acc + f.size, 0),
                            pieces: info.pieces,
                            pieceLength: info['piece length'],
                            files
                          }); // Resolve before closing to prevent race
                          s.end();
                        }
                      } else {
                        console.log(`[Metadata] Requesting next piece from ${host}`);
                        const req = BencodeEncoder.encode({ msg_type: 0, piece: dict.piece + 1 });
                        const len = Buffer.alloc(4);
                        len.writeUInt32BE(req.length + 2);
                        s.write(Buffer.concat([len, Buffer.from([20, utMetadataId as number]), req]));
                      }
                    } else if (dict.msg_type === 2) {
                      // msg_type 2 = reject; we only log and let the timeout fire.
                      console.log(`[Metadata] Peer ${host} rejected piece ${dict.piece}`);
                    }
                  } catch (e) {
                    console.log(`[Metadata] Error parsing data from ${host}`);
                  }
                } else {
                  console.log(`[Metadata] Received unknown extension ${extId} from ${host}`);
                }
              }
            }
          },
          close() { if (!isSettled) { console.log(`[Metadata] Connection closed by ${host}`); finish(null); } },
          connectError() { if (!isSettled) { console.log(`[Metadata] Connect error to ${host}:${portNum}`); finish(null); } },
          error(err: any) { if (!isSettled) { console.log(`[Metadata] Socket error from ${host}: ${err}`); finish(null); } }
        }
      });
    } catch (e) {
      console.log(`[Metadata] Bun.connect failed for ${host}: ${e}`);
      finish(null);
    }
  });
}
/**
 * Single HTTP entry point for the torrent engine.
 *
 * Routes:
 *   GET    /api/torrents                      - list all sessions
 *   POST   /api/torrents                      - add a torrent from a magnet URI
 *   POST   /api/torrents/:hash/start          - begin downloading
 *   DELETE /api/torrents/:hash                - remove a session
 *   GET    /api/torrents/:hash/download/:file - stream a completed file
 *   POST   /api/metadata                      - deprecated single-shot endpoint
 *   (anything else)                           - serve the built client SPA
 */
export async function handleRequest(req: Request): Promise<Response> {
  const url = new URL(req.url);
  // CORS headers attached to every API response.
  const headers = { "Access-Control-Allow-Origin": "*", "Access-Control-Allow-Methods": "POST, OPTIONS", "Access-Control-Allow-Headers": "Content-Type" };
  if (req.method === "OPTIONS") return new Response(null, { headers });
  if (url.pathname === "/api/torrents" && req.method === "GET") {
    const sessions = TorrentManager.getInstance().getAllSessions().map(s => s.toJSON());
    return new Response(JSON.stringify(sessions), { headers });
  }
  if (url.pathname === "/api/torrents" && req.method === "POST") {
    // req.json() is inside the try so a malformed body also yields a 400
    // instead of an unhandled rejection.
    try {
      const { magnetURI } = await req.json();
      const session = TorrentManager.getInstance().addTorrent(magnetURI);
      return new Response(JSON.stringify(session.toJSON()), { headers });
    } catch (e: any) {
      return new Response(JSON.stringify({ error: e.message }), { status: 400, headers });
    }
  }
  if (url.pathname.startsWith("/api/torrents/") && url.pathname.endsWith("/start") && req.method === "POST") {
    const hash = url.pathname.split("/")[3];
    const session = TorrentManager.getInstance().getSession(hash);
    if (session) {
      session.startDownload();
      return new Response(JSON.stringify({ success: true }), { headers });
    }
    return new Response(JSON.stringify({ error: "Session not found" }), { status: 404, headers });
  }
  if (url.pathname.startsWith("/api/torrents/") && req.method === "DELETE") {
    const hash = url.pathname.split("/").pop();
    if (hash) TorrentManager.getInstance().removeSession(hash);
    return new Response(JSON.stringify({ success: true }), { headers });
  }
  // File Download Endpoint
  const downloadMatch = url.pathname.match(/\/api\/torrents\/([^\/]+)\/download\/(.+)/);
  if (downloadMatch && req.method === "GET") {
    const hash = downloadMatch[1];
    const filename = decodeURIComponent(downloadMatch[2]);
    const session = TorrentManager.getInstance().getSession(hash);
    if (!session) {
      return new Response(JSON.stringify({ error: "Session not found" }), { status: 404, headers });
    }
    const filePath = join(process.cwd(), "downloads", hash, filename);
    const file = Bun.file(filePath);
    if (await file.exists()) {
      return new Response(file, {
        headers: {
          ...headers,
          "Content-Disposition": `attachment; filename="${filename.split('/').pop()}"`
        }
      });
    }
    return new Response(JSON.stringify({ error: "File not found on disk" }), { status: 404, headers });
  }
  // Deprecated single-request metadata endpoint (maintained for compatibility)
  if (url.pathname === "/api/metadata" && req.method === "POST") {
    // Previously this branch had no try/catch (and no method check), so an
    // invalid magnet URI threw out of handleRequest instead of producing the
    // 400 response the API promises.
    try {
      const { magnetURI } = await req.json();
      const session = TorrentManager.getInstance().addTorrent(magnetURI);
      // Return current state (might be discovering)
      return new Response(JSON.stringify({
        message: session.status === 'ready' ? "Successfully fetched metadata!" : "Metadata retrieval in progress...",
        parsed: session.magnetData,
        peers: session.peers.slice(0, 10),
        files: session.files
      }), { headers });
    } catch (e: any) {
      return new Response(JSON.stringify({ error: e.message }), { status: 400, headers });
    }
  }
  // Everything else: serve the built client, falling back to index.html for
  // SPA client-side routes.
  const distPath = join(process.cwd(), "packages", "client", "dist");
  const filePath = url.pathname === "/" ? "index.html" : url.pathname.slice(1);
  const file = Bun.file(join(distPath, filePath));
  if (await file.exists()) {
    return new Response(file);
  }
  return new Response(Bun.file(join(distPath, "index.html")));
}
// Start the HTTP server unless running under the test runner, where tests
// call handleRequest() directly.
if (process.env.NODE_ENV !== "test") {
  Bun.serve({
    port: port,
    fetch: handleRequest
  });
}

View File

@@ -0,0 +1,33 @@
import { expect, test, describe } from "bun:test";
import { handleRequest } from "./index";
// Live-network integration test: verifies that peers gathered from several
// public trackers are aggregated and de-duplicated by the engine.
describe("Metadata API (Multi-Tracker)", () => {
  test("aggregates peers from multiple trackers", async () => {
    // Magnet with multiple trackers
    const magnetURI = "magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=udp%3A%2F%2Fopen.stealth.si%3A80%2Fannounce&tr=udp%3A%2F%2Ftracker.torrent.eu.org%3A451%2Fannounce";
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI }),
      headers: { "Content-Type": "application/json" }
    });
    console.log("[Test] Querying multiple trackers for aggregation...");
    const res: Response = await handleRequest(req);
    expect(res.status).toBe(200);
    const body = await res.json();
    console.log(`[Test] Total peers found: ${body.peers.length}`);
    // We expect a decent number of unique peers if trackers are responsive
    // Even if metadata retrieval fails, we want to see peer discovery working
    expect(body.peers).toBeDefined();
    // Check if duplicates are handled (internal logic verification)
    const uniquePeers = new Set(body.peers);
    expect(uniquePeers.size).toBe(body.peers.length);
    // Message should reflect that we found peers
    expect(body.message).toMatch(/Successfully fetched metadata!|Peers found but metadata retrieval timed out/);
  }, 30000);
});

View File

@@ -0,0 +1,32 @@
import { expect, test, describe } from "bun:test";
import { handleRequest } from "./index";
// Live-network smoke test: end-to-end metadata retrieval for a well-seeded,
// stable public torrent. Requires outbound UDP/TCP connectivity.
describe("Metadata API (Real World)", () => {
  test("successfully fetches metadata for a real magnet link", async () => {
    // Snow Crash - Neal Stephenson (EPUB) - Very stable and highly seeded
    const magnetURI = "magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337";
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI }),
      headers: { "Content-Type": "application/json" }
    });
    console.log("[Test] Starting real-world metadata fetch (timeout 60s)...");
    const res: Response = await handleRequest(req);
    expect(res.status).toBe(200);
    const body = await res.json();
    console.log("[Test] API Response:", JSON.stringify(body, null, 2));
    expect(body.message).toBe("Successfully fetched metadata!");
    expect(body.files).toBeDefined();
    expect(body.files.length).toBeGreaterThan(0);
    // Verify file structure
    const file = body.files[0];
    expect(file.name).toBeDefined();
    expect(file.size).toBeGreaterThan(0);
  }, 60000); // 60s timeout for real network retrieval
});

View File

@@ -0,0 +1,31 @@
import { expect, test, describe } from "bun:test";
import { handleRequest } from "./index";
// Live-network test for the single-file ('length' instead of 'files') branch
// of the info-dictionary parsing. Tolerates timeouts on restricted networks.
describe("Metadata API (Single File)", () => {
  test("successfully fetches metadata for a single-file torrent", async () => {
    // Ubuntu 22.04.3 Live Server AMD64 ISO - Standard single-file torrent
    const magnetURI = "magnet:?xt=urn:btih:3e2de7a6d8590bb25b41097fa668045952fcc506&dn=ubuntu-22.04.3-live-server-amd64.iso&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337";
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI }),
      headers: { "Content-Type": "application/json" }
    });
    console.log("[Test] Starting single-file metadata fetch...");
    const res: Response = await handleRequest(req);
    expect(res.status).toBe(200);
    const body = await res.json();
    console.log("[Test] API Response:", JSON.stringify(body, null, 2));
    // Even if it times out on some networks, we check the logic branch if it succeeds
    if (body.files.length > 0) {
      expect(body.files.length).toBe(1);
      expect(body.files[0].name).toMatch(/ubuntu/i);
      expect(body.files[0].size).toBeGreaterThan(1000000000); // ~2GB
    } else {
      console.log("[Test] Metadata fetch timed out (expected on some restricted networks)");
    }
  }, 45000);
});

View File

@@ -0,0 +1,70 @@
// No global overrides to ensure clean integration test environment
import { expect, test, describe } from "bun:test";
import { handleRequest } from "./index";
// Core API contract tests: input validation, no-peer handling, magnet URI
// parsing, and CORS preflight behavior.
describe("Metadata API", () => {
  test("returns 400 for invalid magnet URI", async () => {
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI: "invalid-magnet" }),
      headers: { "Content-Type": "application/json" }
    });
    const res: Response = await handleRequest(req);
    expect(res.status).toBe(400);
    const body = await res.json();
    expect(body.error).toBe("Invalid magnet");
  });
  test("returns 'No peers found' for valid magnet with no trackers (simulated)", async () => {
    // A truly random hash that 100% won't have peers
    const randomHash = "1234567890abcdef1234567890abcdef" + Math.random().toString(16).slice(2, 10);
    const validMagnet = `magnet:?xt=urn:btih:${randomHash}&dn=random.iso`;
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI: validMagnet }),
      headers: { "Content-Type": "application/json" }
    });
    const res: Response = await handleRequest(req);
    expect(res.status).toBe(200);
    const body = await res.json();
    // Message can be either "No peers found" or "Peers found but metadata retrieval timed out..."
    expect(body.message).toMatch(/No peers found|Peers found but metadata retrieval timed out|All attempted peers failed/);
    expect(body.files).toEqual([]);
    // Verify parsing logic is also invoked
    expect(body.parsed).toBeDefined();
    expect(body.parsed.hash).toBeDefined();
    expect(body.parsed.dn).toBe("random.iso");
  }, 15000);
  test("successfully parses a complex magnet URI", async () => {
    const complexMagnet = "magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&xl=1024";
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI: complexMagnet }),
      headers: { "Content-Type": "application/json" }
    });
    const res: Response = await handleRequest(req);
    const body = await res.json();
    expect(res.status).toBe(200);
    expect(body.parsed).toBeDefined();
    expect(body.parsed.hash).toBe("A18230D43BDA105BE7DEF84CB711859018AAA92D");
    expect(body.parsed.dn).toBe("Snow Crash");
    expect(body.parsed.tr).toContain("udp://tracker.opentrackr.org:1337");
    expect(body.parsed.xl).toBe(1024);
  }, 10000);
  test("returns 200 for OPTIONS request (CORS)", async () => {
    const req = new Request("http://localhost/api/metadata", {
      method: "OPTIONS"
    });
    const res = await handleRequest(req);
    expect(res.status).toBe(200);
    expect(res.headers.get("Access-Control-Allow-Origin")).toBe("*");
  });
});

View File

@@ -0,0 +1,37 @@
import { expect, test, describe } from "bun:test";
import { handleRequest } from "./index";
// Live-network test: unreachable trackers must time out individually without
// blocking peer discovery from responsive ones.
describe("Metadata API (Tracker Resilience)", () => {
  test("handles timeouts from unresponsive trackers correctly", async () => {
    // Magnet with one real tracker and two non-existent ones
    const magnetURI = "magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&tr=udp%3A%2F%2F127.0.0.1%3A1%2Fannounce&tr=udp%3A%2F%2F10.255.255.1%3A6969%2Fannounce&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337";
    const req = new Request("http://localhost/api/metadata", {
      method: "POST",
      body: JSON.stringify({ magnetURI }),
      headers: { "Content-Type": "application/json" }
    });
    console.log("[Test] Querying mix of garbage and real trackers...");
    const startTime = Date.now();
    const res: Response = await handleRequest(req);
    const duration = Date.now() - startTime;
    expect(res.status).toBe(200);
    const body = await res.json();
    console.log(`[Test] Request completed in ${duration}ms`);
    console.log(`[Test] Peers found: ${body.peers.length}`);
    // The tracker timeout is 3s. Total time shouldn't exceed the meta-timeout (10s)
    // significantly unless retrieval is actually happening.
    expect(duration).toBeLessThan(45000);
    // Even with 2 garbage trackers, we should find peers from opentrackr
    if (body.peers.length === 0) {
      console.log("[Test] Warning: No peers found in resilience test. This can happen if opentrackr is down.");
    } else {
      expect(body.peers.length).toBeGreaterThan(0);
    }
  }, 60000);
});

View File

@@ -0,0 +1,38 @@
/**
 * Reassembles a single torrent piece from its 16 KiB blocks.
 *
 * Blocks arrive out of order from peers and are stored keyed by their byte
 * offset ("begin") within the piece. Once every expected offset is present,
 * the full piece can be materialized for hashing and storage.
 */
export class PieceReassembler {
  private blocks: Map<number, Uint8Array> = new Map();
  public totalSize: number;
  // Standard BitTorrent request block size; the final block may be shorter.
  private blockSize: number = 16384;

  constructor(totalSize: number) {
    this.totalSize = totalSize;
  }

  /**
   * Stores one block and reports whether the piece is now complete.
   *
   * Misaligned or out-of-range offsets are ignored: previously they were
   * stored anyway, which inflated the block count (making isComplete() lie)
   * and could make getFullPiece() throw a RangeError on set().
   */
  public addBlock(begin: number, data: Uint8Array): boolean {
    const aligned = begin % this.blockSize === 0;
    const inRange = begin >= 0 && begin + data.length <= this.totalSize;
    if (aligned && inRange) {
      this.blocks.set(begin, data);
    }
    return this.isComplete();
  }

  /** True when every expected block offset has been received. */
  public isComplete(): boolean {
    const totalBlocks = Math.ceil(this.totalSize / this.blockSize);
    return this.blocks.size === totalBlocks;
  }

  /** Concatenates all blocks into one buffer, or null if any are missing. */
  public getFullPiece(): Uint8Array | null {
    if (!this.isComplete()) return null;
    const fullData = new Uint8Array(this.totalSize);
    for (const [begin, data] of this.blocks.entries()) {
      fullData.set(data, begin);
    }
    return fullData;
  }

  /** Byte offsets of blocks not yet received, in ascending order. */
  public getMissingBlocks(): number[] {
    const missing: number[] = [];
    const totalBlocks = Math.ceil(this.totalSize / this.blockSize);
    for (let i = 0; i < totalBlocks; i++) {
      const begin = i * this.blockSize;
      if (!this.blocks.has(begin)) missing.push(begin);
    }
    return missing;
  }
}

View File

@@ -0,0 +1,65 @@
import { join } from 'node:path';
import { mkdirSync, openSync, writeSync, closeSync, existsSync } from 'node:fs';
export interface FileInfo {
  /** File path relative to the torrent's download directory. */
  name: string;
  /** File size in bytes. */
  size: number;
}

/**
 * Maps torrent pieces onto the files they span and persists them under
 * downloads/<infohash>/.
 *
 * Files are laid out contiguously in list order; each file records its byte
 * offset within that contiguous stream so a single piece can be split across
 * file boundaries when written.
 */
export class StorageManager {
  private baseDir: string;
  private files: { name: string; size: number; offset: number }[] = [];
  private totalSize: number = 0;

  constructor(hash: string, fileList: FileInfo[]) {
    this.baseDir = join(process.cwd(), 'downloads', hash);
    if (!existsSync(this.baseDir)) {
      mkdirSync(this.baseDir, { recursive: true });
    }
    let currentOffset = 0;
    for (const f of fileList) {
      // File names come from untrusted torrent metadata. Reject absolute
      // paths and '..' segments so a malicious torrent cannot write outside
      // the download directory (path traversal).
      if (/^([\\/]|[A-Za-z]:)/.test(f.name) || f.name.split(/[\\/]/).includes('..')) {
        throw new Error(`Unsafe file path in torrent: ${f.name}`);
      }
      this.files.push({ ...f, offset: currentOffset });
      currentOffset += f.size;
      const filePath = join(this.baseDir, f.name);
      const dirPath = join(filePath, '..');
      if (!existsSync(dirPath)) {
        mkdirSync(dirPath, { recursive: true });
      }
      // Pre-create the file so later writes can open it with 'r+'.
      if (!existsSync(filePath)) {
        closeSync(openSync(filePath, 'w'));
      }
    }
    this.totalSize = currentOffset;
  }

  /**
   * Writes one verified piece to every file it overlaps.
   *
   * @param pieceIndex zero-based piece number
   * @param pieceSize  nominal piece length (the final piece's data may be shorter)
   * @param data       the piece's bytes
   */
  public writePiece(pieceIndex: number, pieceSize: number, data: Uint8Array) {
    const pieceStart = pieceIndex * pieceSize;
    const pieceEnd = pieceStart + data.length;
    for (const file of this.files) {
      const fileEnd = file.offset + file.size;
      // Skip files that do not overlap this piece's byte range.
      if (pieceStart >= fileEnd || pieceEnd <= file.offset) continue;
      const writeStart = Math.max(pieceStart, file.offset);
      const writeEnd = Math.min(pieceEnd, fileEnd);
      const writeSize = writeEnd - writeStart;
      const fileWriteOffset = writeStart - file.offset;
      const dataReadOffset = writeStart - pieceStart;
      const filePath = join(this.baseDir, file.name);
      // 'r+' keeps existing contents and allows positioned writes (needed on Windows).
      const fd = openSync(filePath, 'r+');
      try {
        writeSync(fd, data, dataReadOffset, writeSize, fileWriteOffset);
      } finally {
        // Close even if the write throws, so the descriptor never leaks.
        closeSync(fd);
      }
    }
  }
}

View File

@@ -0,0 +1,177 @@
import { Bitfield } from '@torrent-client/shared';
import crypto from 'node:crypto';
/**
 * Manages a single TCP connection to one peer for downloading piece data
 * (BitTorrent wire protocol, BEP 3). Parses the handshake and subsequent
 * length-prefixed messages, tracks choke/interest state and the peer's piece
 * bitfield, and pipelines up to 5 outstanding block requests.
 */
export class PeerWorker {
  private socket: any = null;
  private buffer = Buffer.alloc(0);       // accumulates partial TCP reads
  private handshakeDone = false;
  private peerBitfield: Bitfield | null = null;
  private amInterested = false;
  private peerChoked = true;              // peers start out choking us
  private activeRequests = 0;             // in-flight request count (max 5)
  private pendingRequests = new Set<string>(); // outstanding requests, keyed "index:begin"

  constructor(
    private hash: string,
    private host: string,
    private port: number,
    private totalPieces: number,
    private onPiece: (index: number, begin: number, data: Buffer) => void,
    private onReady: () => void,
    private onClose: () => void
  ) {
    this.peerBitfield = new Bitfield(totalPieces);
  }

  /** Opens the TCP connection; onClose fires on any failure path. */
  public connect() {
    console.log(`[Worker] Connecting to ${this.host}:${this.port}...`);
    try {
      this.socket = Bun.connect({
        hostname: this.host,
        port: this.port,
        socket: {
          open: (s: any) => this.onOpen(s),
          data: (s: any, d: Buffer) => this.onData(s, d),
          close: () => {
            console.log(`[Worker] Connection closed: ${this.host}`);
            this.onClose();
          },
          connectError: () => {
            console.log(`[Worker] Connect error: ${this.host}`);
            this.onClose();
          },
          error: (e: any) => {
            console.log(`[Worker] Socket error for ${this.host}:`, e);
            this.onClose();
          },
        }
      });
    } catch (e) {
      this.onClose();
    }
  }

  public getHost() {
    return this.host;
  }

  /** Sends the 68-byte BitTorrent handshake with the extension bit set. */
  private onOpen(s: any) {
    this.socket = s; // Ensure we use the active socket instance
    const reserved = Buffer.alloc(8);
    reserved[5] |= 0x10; // Extension Protocol support (BEP 10)
    s.write(Buffer.concat([
      Buffer.from([19]), Buffer.from("BitTorrent protocol"),
      reserved, Buffer.from(this.hash, 'hex'), Buffer.from("-BT0001-" + crypto.randomBytes(6).toString("hex"))
    ]));
  }

  /** Buffers incoming bytes and dispatches complete protocol messages. */
  private onData(s: any, data: Buffer) {
    this.socket = s;
    this.buffer = Buffer.concat([this.buffer, data]);
    if (!this.handshakeDone) {
      // Wait for the peer's full 68-byte handshake first.
      if (this.buffer.length >= 68 && this.buffer[0] === 19) {
        this.handshakeDone = true;
        this.buffer = this.buffer.slice(68);
        console.log(`[Worker] Handshake done with ${this.host}`);
        this.onReady(); // Trigger scheduler to send INTERESTED
      } else return;
    }
    while (this.buffer.length >= 4) {
      const length = this.buffer.readUInt32BE(0);
      if (length === 0) { // keep-alive
        this.buffer = this.buffer.slice(4);
        continue;
      }
      if (this.buffer.length < length + 4) break;
      const msg = this.buffer.slice(4, length + 4);
      this.buffer = this.buffer.slice(length + 4);
      this.handleMessage(s, msg);
    }
  }

  /** Reacts to one complete wire-protocol message (first byte = message id). */
  private handleMessage(s: any, msg: Buffer) {
    this.socket = s;
    const id = msg[0];
    const payload = msg.slice(1);
    switch (id) {
      case 0: { // choke
        this.peerChoked = true;
        // A choke discards our outstanding requests. Clear the bookkeeping so
        // the scheduler can re-issue those blocks after the next unchoke —
        // previously the keys stayed in pendingRequests forever, so
        // requestBlock() refused to re-request them and the peer stalled.
        this.pendingRequests.clear();
        this.activeRequests = 0;
        break;
      }
      case 1: // unchoke
        this.peerChoked = false;
        this.onReady(); // Ready to request blocks now!
        break;
      case 4: { // have
        const haveIdx = payload.readUInt32BE(0);
        this.peerBitfield?.set(haveIdx);
        this.onReady();
        break;
      }
      case 5: // bitfield
        if (this.peerBitfield) {
          console.log(`[Worker] BITFIELD from ${this.host} (${payload.length} bytes)`);
          this.peerBitfield.fromBuffer(payload);
        }
        this.onReady(); // Check for interest
        break;
      case 7: { // piece
        const index = payload.readUInt32BE(0);
        const begin = payload.readUInt32BE(4);
        const block = payload.slice(8);
        this.pendingRequests.delete(`${index}:${begin}`);
        // Guard so an unsolicited piece cannot push the counter negative.
        if (this.activeRequests > 0) this.activeRequests--;
        this.onPiece(index, begin, block);
        break;
      }
    }
  }

  /** Proactively announces interest without issuing a block request. */
  public signalInterest() {
    if (!this.socket || this.amInterested) return;
    console.log(`[Worker] Proactively sending INTERESTED to ${this.host}`);
    this.socket.write(Buffer.from([0, 0, 0, 1, 2]));
    this.amInterested = true;
  }

  /** Keys ("index:begin") of requests sent but not yet answered. */
  public getPendingRequests() {
    return this.pendingRequests;
  }

  /**
   * Requests one 16 KiB block, sending INTERESTED first if needed.
   * No-op while choked, while the block is already pending, or when the
   * pipeline of 5 in-flight requests is full.
   */
  public requestBlock(index: number, begin: number, length: number) {
    if (!this.socket) return;
    const key = `${index}:${begin}`;
    if (this.pendingRequests.has(key)) return;
    // Signal interest if not already
    if (!this.amInterested) {
      console.log(`[Worker] Sending INTERESTED to ${this.host}`);
      this.socket.write(Buffer.from([0, 0, 0, 1, 2]));
      this.amInterested = true;
    }
    if (this.peerChoked) return; // Wait for unchoke BEFORE sending REQUEST
    if (this.activeRequests < 5) {
      const req = Buffer.alloc(13);
      req[0] = 6; // request ID
      req.writeUInt32BE(index, 1);
      req.writeUInt32BE(begin, 5);
      req.writeUInt32BE(length, 9);
      const len = Buffer.alloc(4);
      len.writeUInt32BE(13);
      this.socket.write(Buffer.concat([len, req]));
      this.activeRequests++;
      this.pendingRequests.add(key);
    }
  }

  /** Whether the peer has advertised this piece (via bitfield or have). */
  public hasPiece(index: number) {
    return this.peerBitfield?.has(index) ?? false;
  }
}

View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "Node",
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"outDir": "dist",
"baseUrl": ".",
"paths": {
"@torrent-client/shared": [
"../shared/src"
]
},
"types": [
"bun-types"
]
},
"include": [
"src/**/*"
],
"exclude": []
}

View File

@@ -0,0 +1,16 @@
{
"name": "@torrent-client/shared",
"version": "1.0.0",
"private": true,
"type": "module",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"test": "vitest run"
},
"devDependencies": {
"@types/node": "^18.17.0",
"bun-types": "latest"
}
}

View File

@@ -0,0 +1,137 @@
/**
 * Bencode Decoder
 * Implements the BitTorrent Bencode format:
 *   Strings:      <length>:<contents>
 *   Integers:     i<integer>e
 *   Lists:        l<contents>e
 *   Dictionaries: d<contents>e
 */
export class BencodeDecoder {
    buffer;     // Uint8Array being parsed
    offset = 0; // current read position within buffer
    /** Current read position — lets callers locate trailing data after a decode. */
    getOffset() { return this.offset; }
    constructor(data) {
        if (typeof data === 'string') {
            this.buffer = new TextEncoder().encode(data);
        }
        else {
            this.buffer = data;
        }
    }
    /** Decodes the next value at the current offset. Throws on malformed input. */
    decode() {
        const char = String.fromCharCode(this.buffer[this.offset]);
        if (char === 'i') {
            return this.decodeInteger();
        }
        else if (char === 'l') {
            return this.decodeList();
        }
        else if (char === 'd') {
            return this.decodeDictionary();
        }
        else if (char >= '0' && char <= '9') {
            return this.decodeString();
        }
        throw new Error(`Unexpected character at offset ${this.offset}: ${char}`);
    }
    decodeInteger() {
        this.offset++; // skip 'i'
        const end = this.buffer.indexOf(101, this.offset); // 'e' is 101
        if (end === -1)
            throw new Error('Unterminated integer');
        const raw = new TextDecoder().decode(this.buffer.subarray(this.offset, end));
        const value = parseInt(raw, 10);
        // Reject 'ie', 'iabce', etc. — previously these silently produced NaN.
        if (Number.isNaN(value))
            throw new Error(`Invalid integer at offset ${this.offset}: ${raw}`);
        this.offset = end + 1;
        return value;
    }
    decodeString() {
        const colonIndex = this.buffer.indexOf(58, this.offset); // ':' is 58
        if (colonIndex === -1)
            throw new Error('Invalid string length');
        const lengthStr = new TextDecoder().decode(this.buffer.subarray(this.offset, colonIndex));
        const length = parseInt(lengthStr, 10);
        // Reject lengths that run past the end of the buffer — previously the
        // string was silently truncated by subarray().
        if (Number.isNaN(length) || colonIndex + 1 + length > this.buffer.length)
            throw new Error(`String length out of range at offset ${this.offset}`);
        this.offset = colonIndex + 1;
        const data = this.buffer.subarray(this.offset, this.offset + length);
        this.offset += length;
        // Try to decode as UTF-8; bytes that are not valid text (e.g. the
        // binary 'pieces' hashes) are kept as a Uint8Array.
        try {
            return new TextDecoder('utf-8', { fatal: true }).decode(data);
        }
        catch {
            return data;
        }
    }
    decodeList() {
        this.offset++; // skip 'l'
        const list = [];
        while (this.offset < this.buffer.length && this.buffer[this.offset] !== 101) { // 'e'
            list.push(this.decode());
        }
        // Explicitly reject input that ran out before the closing 'e'.
        if (this.buffer[this.offset] !== 101)
            throw new Error('Unterminated list');
        this.offset++; // skip 'e'
        return list;
    }
    decodeDictionary() {
        this.offset++; // skip 'd'
        const dict = {};
        while (this.offset < this.buffer.length && this.buffer[this.offset] !== 101) { // 'e'
            const key = this.decode();
            if (typeof key !== 'string')
                throw new Error('Dictionary keys must be strings');
            const value = this.decode();
            dict[key] = value;
        }
        if (this.buffer[this.offset] !== 101)
            throw new Error('Unterminated dictionary');
        this.offset++; // skip 'e'
        return dict;
    }
    /** One-shot convenience wrapper. */
    static decode(data) {
        return new BencodeDecoder(data).decode();
    }
}
/**
 * Bencode Encoder — serializes strings, integers, byte arrays, lists and
 * dictionaries into the BitTorrent Bencode wire format. Dictionary keys are
 * emitted in sorted order, as the spec requires.
 */
export class BencodeEncoder {
    static encoder = new TextEncoder();
    /** Joins several byte arrays into a single contiguous Uint8Array. */
    static concatUint8Arrays(arrays) {
        const total = arrays.reduce((sum, a) => sum + a.length, 0);
        const out = new Uint8Array(total);
        let pos = 0;
        arrays.forEach((a) => {
            out.set(a, pos);
            pos += a.length;
        });
        return out;
    }
    /** Encodes one value; throws for types bencode cannot represent. */
    static encode(value) {
        if (typeof value === 'number') {
            // Bencode has no float type: truncate toward negative infinity.
            return this.encoder.encode(`i${Math.floor(value)}e`);
        }
        if (typeof value === 'string') {
            const body = this.encoder.encode(value);
            return this.concatUint8Arrays([this.encoder.encode(`${body.length}:`), body]);
        }
        if (value instanceof Uint8Array) {
            // Raw bytes are emitted as-is with a length prefix.
            return this.concatUint8Arrays([this.encoder.encode(`${value.length}:`), value]);
        }
        if (Array.isArray(value)) {
            const chunks = [
                this.encoder.encode('l'),
                ...value.map((item) => BencodeEncoder.encode(item)),
                this.encoder.encode('e'),
            ];
            return this.concatUint8Arrays(chunks);
        }
        if (typeof value === 'object' && value !== null) {
            const chunks = [this.encoder.encode('d')];
            for (const key of Object.keys(value).sort()) {
                chunks.push(BencodeEncoder.encode(key), BencodeEncoder.encode(value[key]));
            }
            chunks.push(this.encoder.encode('e'));
            return this.concatUint8Arrays(chunks);
        }
        throw new Error(`Unsupported value type for bencoding: ${typeof value}`);
    }
}

View File

@@ -0,0 +1,37 @@
import { describe, it, expect } from 'vitest';
import { BencodeDecoder } from './bencode';
// Unit tests for BencodeDecoder covering each bencode type plus malformed input.
// NOTE(review): this spec appears byte-identical to a sibling test file in the
// same commit — consider deleting one copy.
describe('BencodeDecoder', () => {
  it('should decode integers', () => {
    expect(BencodeDecoder.decode('i42e')).toBe(42);
    expect(BencodeDecoder.decode('i-42e')).toBe(-42);
    expect(BencodeDecoder.decode('i0e')).toBe(0);
  });
  it('should decode strings', () => {
    expect(BencodeDecoder.decode('4:spam')).toBe('spam');
    expect(BencodeDecoder.decode('0:')).toBe('');
  });
  it('should decode lists', () => {
    expect(BencodeDecoder.decode('l4:spami42ee')).toEqual(['spam', 42]);
    expect(BencodeDecoder.decode('le')).toEqual([]);
  });
  it('should decode dictionaries', () => {
    expect(BencodeDecoder.decode('d3:bar4:spam3:fooi42ee')).toEqual({
      bar: 'spam',
      foo: 42
    });
    expect(BencodeDecoder.decode('de')).toEqual({});
  });
  it('should handle nested structures', () => {
    const encoded = 'd4:listl4:spami42ee3:subd3:key5:valueee';
    const decoded = BencodeDecoder.decode(encoded);
    expect(decoded).toEqual({
      list: ['spam', 42],
      sub: { key: 'value' }
    });
  });
  it('should throw error on invalid bencode', () => {
    expect(() => BencodeDecoder.decode('x')).toThrow();
    expect(() => BencodeDecoder.decode('i42')).toThrow();
    expect(() => BencodeDecoder.decode('l4:spam')).toThrow();
  });
});

View File

@@ -0,0 +1,43 @@
import { describe, it, expect } from 'vitest';
import { BencodeDecoder } from './bencode';
// Unit tests for BencodeDecoder covering each bencode type plus malformed input.
// NOTE(review): this spec appears byte-identical to a sibling test file in the
// same commit — consider deleting one copy.
describe('BencodeDecoder', () => {
  it('should decode integers', () => {
    expect(BencodeDecoder.decode('i42e')).toBe(42);
    expect(BencodeDecoder.decode('i-42e')).toBe(-42);
    expect(BencodeDecoder.decode('i0e')).toBe(0);
  });
  it('should decode strings', () => {
    expect(BencodeDecoder.decode('4:spam')).toBe('spam');
    expect(BencodeDecoder.decode('0:')).toBe('');
  });
  it('should decode lists', () => {
    expect(BencodeDecoder.decode('l4:spami42ee')).toEqual(['spam', 42]);
    expect(BencodeDecoder.decode('le')).toEqual([]);
  });
  it('should decode dictionaries', () => {
    expect(BencodeDecoder.decode('d3:bar4:spam3:fooi42ee')).toEqual({
      bar: 'spam',
      foo: 42
    });
    expect(BencodeDecoder.decode('de')).toEqual({});
  });
  it('should handle nested structures', () => {
    const encoded = 'd4:listl4:spami42ee3:subd3:key5:valueee';
    const decoded = BencodeDecoder.decode(encoded);
    expect(decoded).toEqual({
      list: ['spam', 42],
      sub: { key: 'value' }
    });
  });
  it('should throw error on invalid bencode', () => {
    expect(() => BencodeDecoder.decode('x')).toThrow();
    expect(() => BencodeDecoder.decode('i42')).toThrow();
    expect(() => BencodeDecoder.decode('l4:spam')).toThrow();
  });
});

View File

@@ -0,0 +1,148 @@
/**
 * Bencode Decoder (BEP 3)
 * Implements the BitTorrent Bencode format:
 *   Strings:      <length>:<contents>
 *   Integers:     i<integer>e
 *   Lists:        l<contents>e
 *   Dictionaries: d<contents>e
 */
export type BencodeValue = string | number | BencodeValue[] | { [key: string]: BencodeValue } | Uint8Array;

export class BencodeDecoder {
  private buffer: Uint8Array;
  private offset: number = 0; // current read position into `buffer`

  public getOffset(): number { return this.offset; }

  constructor(data: Uint8Array | string) {
    if (typeof data === 'string') {
      this.buffer = new TextEncoder().encode(data);
    } else {
      this.buffer = data;
    }
  }

  /**
   * Decodes the next bencoded value at the current offset.
   * @throws Error on malformed or truncated input.
   */
  public decode(): BencodeValue {
    // Explicit bounds check: reading past the end would otherwise yield
    // `undefined` and a confusing "Unexpected character" error.
    if (this.offset >= this.buffer.length) {
      throw new Error(`Unexpected end of input at offset ${this.offset}`);
    }
    const char = String.fromCharCode(this.buffer[this.offset]);
    if (char === 'i') {
      return this.decodeInteger();
    } else if (char === 'l') {
      return this.decodeList();
    } else if (char === 'd') {
      return this.decodeDictionary();
    } else if (char >= '0' && char <= '9') {
      return this.decodeString();
    }
    throw new Error(`Unexpected character at offset ${this.offset}: ${char}`);
  }

  private decodeInteger(): number {
    this.offset++; // skip 'i'
    const end = this.buffer.indexOf(101, this.offset); // 'e' is 101
    if (end === -1) throw new Error('Unterminated integer');
    const raw = new TextDecoder().decode(this.buffer.subarray(this.offset, end));
    // Reject malformed payloads ('iabce', 'i4.5e', 'i1-2e'...) instead of
    // letting parseInt silently return NaN or a truncated value.
    if (!/^-?\d+$/.test(raw)) throw new Error(`Invalid integer: ${raw}`);
    this.offset = end + 1;
    return parseInt(raw, 10);
  }

  private decodeString(): Uint8Array | string {
    const colonIndex = this.buffer.indexOf(58, this.offset); // ':' is 58
    if (colonIndex === -1) throw new Error('Invalid string length');
    const lengthStr = new TextDecoder().decode(this.buffer.subarray(this.offset, colonIndex));
    // Length must be a plain non-negative decimal number.
    if (!/^\d+$/.test(lengthStr)) throw new Error(`Invalid string length: ${lengthStr}`);
    const length = parseInt(lengthStr, 10);
    this.offset = colonIndex + 1;
    // Without this check, subarray() would silently clamp and return a
    // truncated string for inputs like '5:spam'.
    if (this.offset + length > this.buffer.length) {
      throw new Error('Unexpected end of string data');
    }
    const data = this.buffer.subarray(this.offset, this.offset + length);
    this.offset += length;
    // Try to decode as UTF-8; binary payloads (e.g. piece hashes) that are
    // not valid UTF-8 are kept as raw bytes.
    try {
      return new TextDecoder('utf-8', { fatal: true }).decode(data);
    } catch {
      return data;
    }
  }

  private decodeList(): BencodeValue[] {
    this.offset++; // skip 'l'
    const list: BencodeValue[] = [];
    // 101 === 'e' terminator; decode() throws if the input ends before it.
    while (this.buffer[this.offset] !== 101) {
      list.push(this.decode());
    }
    this.offset++; // skip 'e'
    return list;
  }

  private decodeDictionary(): { [key: string]: BencodeValue } {
    this.offset++; // skip 'd'
    const dict: { [key: string]: BencodeValue } = {};
    while (this.buffer[this.offset] !== 101) { // 'e'
      const key = this.decode();
      if (typeof key !== 'string') throw new Error('Dictionary keys must be strings');
      const value = this.decode();
      dict[key] = value;
    }
    this.offset++; // skip 'e'
    return dict;
  }

  /** Convenience one-shot decode of a complete bencoded document. */
  public static decode(data: Uint8Array | string): BencodeValue {
    return new BencodeDecoder(data).decode();
  }
}
export class BencodeEncoder {
  private static encoder = new TextEncoder();

  /** Concatenates byte chunks into a single contiguous buffer. */
  private static concatUint8Arrays(arrays: Uint8Array[]): Uint8Array {
    const totalLength = arrays.reduce((acc, arr) => acc + arr.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const arr of arrays) {
      result.set(arr, offset);
      offset += arr.length;
    }
    return result;
  }

  /**
   * Encodes a value into its bencoded byte representation (BEP 3).
   * @throws Error for non-finite numbers or unsupported value types.
   */
  public static encode(value: BencodeValue): Uint8Array {
    if (typeof value === 'string') {
      const bytes = this.encoder.encode(value);
      // The length prefix counts UTF-8 bytes, not JS string length.
      const prefix = this.encoder.encode(`${bytes.length}:`);
      return this.concatUint8Arrays([prefix, bytes]);
    } else if (typeof value === 'number') {
      // NaN/Infinity would otherwise serialize as invalid bencode ('iNaNe').
      if (!Number.isFinite(value)) {
        throw new Error(`Cannot bencode non-finite number: ${value}`);
      }
      return this.encoder.encode(`i${Math.floor(value)}e`);
    } else if (value instanceof Uint8Array) {
      const prefix = this.encoder.encode(`${value.length}:`);
      return this.concatUint8Arrays([prefix, value]);
    } else if (Array.isArray(value)) {
      const parts: Uint8Array[] = [this.encoder.encode('l')];
      for (const item of value) {
        parts.push(BencodeEncoder.encode(item));
      }
      parts.push(this.encoder.encode('e'));
      return this.concatUint8Arrays(parts);
    } else if (typeof value === 'object' && value !== null) {
      const parts: Uint8Array[] = [this.encoder.encode('d')];
      // Bencode requires dictionary keys in sorted order.
      const keys = Object.keys(value).sort();
      for (const key of keys) {
        parts.push(BencodeEncoder.encode(key));
        parts.push(BencodeEncoder.encode(value[key]));
      }
      parts.push(this.encoder.encode('e'));
      return this.concatUint8Arrays(parts);
    }
    throw new Error(`Unsupported value type for bencoding: ${typeof value}`);
  }
}

View File

@@ -0,0 +1,3 @@
export * from './magnetParser';
export * from './bencode';
export * from './protocol';

View File

@@ -0,0 +1,3 @@
export * from './magnetParser';
export * from './bencode';
export * from './protocol';

View File

@@ -0,0 +1,70 @@
export function parseMagnetURI(uri) {
if (!uri.startsWith('magnet:?')) {
return null;
}
const result = {
tr: [],
kt: [],
ws: [],
so: [],
"x.pe": [],
};
const params = uri.substring(8).split('&');
for (const param of params) {
const [key, value] = param.split('=').map(decodeURIComponent);
if (!key || !value)
continue;
switch (key) {
case 'xt':
result.xt = value;
if (value.startsWith('urn:btih:')) {
result.hash = value.substring(9);
}
break;
case 'dn':
result.dn = value;
break;
case 'tr':
result.tr.push(value);
break;
case 'xl':
result.xl = parseInt(value, 10);
break;
case 'as':
result.as = value;
break;
case 'xs':
result.xs = value;
break;
case 'kt':
result.kt?.push(value);
break;
case 'ws':
result.ws?.push(value);
break;
case 'mt':
result.mt = value;
break;
case 'so':
result.so?.push(value);
break;
case 'x.pe':
result["x.pe"]?.push(value);
break;
default:
// Handle potentially multiple values for unknown keys
if (result[key]) {
if (Array.isArray(result[key])) {
result[key].push(value);
}
else {
result[key] = [result[key], value];
}
}
else {
result[key] = value;
}
}
}
return result;
}

View File

@@ -0,0 +1,52 @@
import { describe, it, expect } from 'vitest';
import { parseMagnetURI } from './magnetParser';
describe('magnetParser', () => {
  it('should parse a standard magnet link correctly', () => {
    const link = 'magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash%20by%20Neal%20Stephenson%20EPUB&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337';
    const parsed = parseMagnetURI(link);
    expect(parsed).not.toBeNull();
    expect(parsed?.hash).toBe('A18230D43BDA105BE7DEF84CB711859018AAA92D');
    expect(parsed?.dn).toBe('Snow Crash by Neal Stephenson EPUB');
    expect(parsed?.tr).toContain('udp://tracker.opentrackr.org:1337');
  });

  it('should handle multiple trackers', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&tr=tracker1&tr=tracker2');
    expect(parsed?.tr).toHaveLength(2);
    expect(parsed?.tr).toContain('tracker1');
    expect(parsed?.tr).toContain('tracker2');
  });

  it('should parse file length (xl)', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&xl=1024')?.xl).toBe(1024);
  });

  it('should parse acceptable sources (as)', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&as=http://example.com/file')?.as).toBe('http://example.com/file');
  });

  it('should parse keywords (kt)', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&kt=ebook&kt=stephenson');
    expect(parsed?.kt).toContain('ebook');
    expect(parsed?.kt).toContain('stephenson');
  });

  it('should return null for invalid schemes', () => {
    expect(parseMagnetURI('http://example.com')).toBeNull();
  });

  it('should handle unknown parameters', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&foo=bar')?.foo).toBe('bar');
  });

  it('should handle multiple unknown parameters with the same key', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&foo=bar&foo=baz');
    expect(Array.isArray(parsed?.foo)).toBe(true);
    expect(parsed?.foo).toContain('bar');
    expect(parsed?.foo).toContain('baz');
  });
});

View File

@@ -0,0 +1,68 @@
import { describe, it, expect } from 'vitest';
import { parseMagnetURI } from './magnetParser';
describe('magnetParser', () => {
  it('should parse a standard magnet link correctly', () => {
    const link = 'magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash%20by%20Neal%20Stephenson%20EPUB&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337';
    const parsed = parseMagnetURI(link);
    expect(parsed).not.toBeNull();
    expect(parsed?.hash).toBe('A18230D43BDA105BE7DEF84CB711859018AAA92D');
    expect(parsed?.dn).toBe('Snow Crash by Neal Stephenson EPUB');
    expect(parsed?.tr).toContain('udp://tracker.opentrackr.org:1337');
  });

  it('should handle multiple trackers', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&tr=tracker1&tr=tracker2');
    expect(parsed?.tr).toHaveLength(2);
    expect(parsed?.tr).toContain('tracker1');
    expect(parsed?.tr).toContain('tracker2');
  });

  it('should parse file length (xl)', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&xl=1024')?.xl).toBe(1024);
  });

  it('should parse acceptable sources (as)', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&as=http://example.com/file')?.as).toBe('http://example.com/file');
  });

  it('should parse keywords (kt)', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&kt=ebook&kt=stephenson');
    expect(parsed?.kt).toContain('ebook');
    expect(parsed?.kt).toContain('stephenson');
  });

  it('should return null for invalid schemes', () => {
    expect(parseMagnetURI('http://example.com')).toBeNull();
  });

  it('should handle unknown parameters', () => {
    expect(parseMagnetURI('magnet:?xt=urn:btih:hash&foo=bar')?.foo).toBe('bar');
  });

  it('should handle multiple unknown parameters with the same key', () => {
    const parsed = parseMagnetURI('magnet:?xt=urn:btih:hash&foo=bar&foo=baz');
    expect(Array.isArray(parsed?.foo)).toBe(true);
    expect(parsed?.foo).toContain('bar');
    expect(parsed?.foo).toContain('baz');
  });
});

View File

@@ -0,0 +1,88 @@
/** Parsed fields of a magnet URI; unknown keys land on the index signature. */
export interface MagnetData {
  xt?: string;       // exact topic (urn)
  hash?: string;     // info-hash extracted from a urn:btih xt
  dn?: string;       // display name
  tr: string[];      // tracker URLs
  xl?: number;       // exact length in bytes
  as?: string;       // acceptable source
  xs?: string;       // exact source
  kt?: string[];     // keywords
  ws?: string[];     // web seeds
  mt?: string;       // manifest topic
  so?: string[];     // select-only file indices
  "x.pe"?: string[]; // peer addresses
  [key: string]: string | string[] | number | undefined;
}

/**
 * Parses a magnet URI into a MagnetData record.
 *
 * Returns null when the input does not use the magnet scheme. Known keys are
 * mapped to typed fields; unknown keys are collected verbatim, promoting to a
 * string[] when the same key repeats.
 */
export function parseMagnetURI(uri: string): MagnetData | null {
  if (!uri.startsWith('magnet:?')) {
    return null;
  }
  const result: MagnetData = {
    tr: [],
    kt: [],
    ws: [],
    so: [],
    "x.pe": [],
  };
  const params = uri.substring(8).split('&');
  for (const param of params) {
    // Split on the FIRST '=' only: values such as web-seed or source URLs
    // may themselves contain '=' and must not be truncated.
    const eq = param.indexOf('=');
    if (eq <= 0) continue; // no '=' or empty key
    const key = decodeURIComponent(param.substring(0, eq));
    const value = decodeURIComponent(param.substring(eq + 1));
    if (!value) continue;
    switch (key) {
      case 'xt':
        result.xt = value;
        if (value.startsWith('urn:btih:')) {
          result.hash = value.substring(9); // BitTorrent info-hash
        }
        break;
      case 'dn':
        result.dn = value;
        break;
      case 'tr':
        result.tr.push(value);
        break;
      case 'xl':
        result.xl = parseInt(value, 10);
        break;
      case 'as':
        result.as = value;
        break;
      case 'xs':
        result.xs = value;
        break;
      case 'kt':
        result.kt?.push(value);
        break;
      case 'ws':
        result.ws?.push(value);
        break;
      case 'mt':
        result.mt = value;
        break;
      case 'so':
        result.so?.push(value);
        break;
      case 'x.pe':
        result["x.pe"]?.push(value);
        break;
      default:
        // Handle potentially multiple values for unknown keys
        if (result[key]) {
          if (Array.isArray(result[key])) {
            (result[key] as string[]).push(value);
          } else {
            result[key] = [result[key] as string, value];
          }
        } else {
          result[key] = value;
        }
    }
  }
  return result;
}

View File

@@ -0,0 +1,72 @@
/**
* BitTorrent Protocol Structures (BEP 9 & 10)
* This file defines the structures for the Extension Protocol and Metadata Extension.
*/
/**
 * Simulates the reassembly of metadata pieces.
 * In a real scenario, these pieces would come from different peers over individual TCP packets.
 */
export class MetadataReassembler {
    pieces = new Map();
    totalSize;
    pieceSize = 16384; // 16KiB
    constructor(totalSize) {
        this.totalSize = totalSize;
    }
    /** Number of pieces the metadata splits into. */
    totalPieceCount() {
        return Math.ceil(this.totalSize / this.pieceSize);
    }
    /** Stores a piece (replacing any previous copy) and reports completeness. */
    addPiece(index, data) {
        this.pieces.set(index, data);
        return this.isComplete();
    }
    isComplete() {
        const totalPieces = this.totalPieceCount();
        if (this.pieces.size < totalPieces)
            return false;
        // Require every expected index to be present, not just a matching
        // count, so a stray out-of-range index cannot mark us complete.
        for (let i = 0; i < totalPieces; i++) {
            if (!this.pieces.has(i))
                return false;
        }
        return true;
    }
    getFullMetadata() {
        if (!this.isComplete())
            return null;
        const fullData = new Uint8Array(this.totalSize);
        const totalPieces = this.totalPieceCount();
        for (let index = 0; index < totalPieces; index++) {
            // Each piece occupies a fixed slot; only the final piece may be short.
            fullData.set(this.pieces.get(index), index * this.pieceSize);
        }
        return fullData;
    }
    getProgress() {
        const totalPieces = this.totalPieceCount();
        if (totalPieces === 0)
            return 100; // zero-size metadata is trivially complete (avoids 0/0 NaN)
        return (this.pieces.size / totalPieces) * 100;
    }
}
/**
 * Tracks which pieces we have and provides bitfield generation.
 * Piece 0 maps to the most significant bit of byte 0.
 */
export class Bitfield {
    bits;
    totalPieces;
    constructor(totalPieces) {
        this.totalPieces = totalPieces;
        this.bits = new Uint8Array(Math.ceil(totalPieces / 8));
    }
    set(index) {
        if (index >= this.totalPieces)
            return;
        const byte = Math.floor(index / 8);
        const mask = 1 << (7 - (index % 8));
        this.bits[byte] |= mask;
    }
    has(index) {
        if (index >= this.totalPieces)
            return false;
        const byte = Math.floor(index / 8);
        const mask = 1 << (7 - (index % 8));
        return (this.bits[byte] & mask) !== 0;
    }
    toBuffer() {
        return this.bits;
    }
    fromBuffer(buffer) {
        this.bits = new Uint8Array(buffer);
    }
}

View File

@@ -0,0 +1,40 @@
import { expect, test, describe } from "bun:test";
import { MetadataReassembler } from "./protocol";
describe("MetadataReassembler", () => {
    test("reassembles single piece correctly", () => {
        const payload = new TextEncoder().encode("metadata-content");
        const assembler = new MetadataReassembler(payload.length);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.addPiece(0, payload)).toBe(true);
        expect(assembler.isComplete()).toBe(true);
        expect(assembler.getProgress()).toBe(100);
        expect(new TextDecoder().decode(assembler.getFullMetadata())).toBe("metadata-content");
    });
    test("reassembles multiple pieces out of order", () => {
        // 16KiB is the default piece size
        const first = new Uint8Array(16384).fill(1);
        const second = new Uint8Array(16384).fill(2);
        const last = new Uint8Array(100).fill(3); // Final piece
        const assembler = new MetadataReassembler(16384 * 2 + 100);
        // Add pieces out of order: 2, 0, 1
        assembler.addPiece(2, last);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.getProgress()).toBeCloseTo(33.3, 1);
        assembler.addPiece(0, first);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.addPiece(1, second)).toBe(true);
        const assembled = assembler.getFullMetadata();
        expect(assembled.length).toBe(16384 * 2 + 100);
        expect(assembled.slice(0, 16384)).toEqual(first);
        expect(assembled.slice(16384, 16384 * 2)).toEqual(second);
        expect(assembled.slice(16384 * 2)).toEqual(last);
    });
    test("returns null for incomplete metadata", () => {
        const assembler = new MetadataReassembler(20000);
        assembler.addPiece(0, new Uint8Array(16384));
        expect(assembler.getFullMetadata()).toBeNull();
    });
});

View File

@@ -0,0 +1,51 @@
import { expect, test, describe } from "bun:test";
import { MetadataReassembler } from "./protocol";
describe("MetadataReassembler", () => {
    test("reassembles single piece correctly", () => {
        const payload = new TextEncoder().encode("metadata-content");
        const assembler = new MetadataReassembler(payload.length);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.addPiece(0, payload)).toBe(true);
        expect(assembler.isComplete()).toBe(true);
        expect(assembler.getProgress()).toBe(100);
        expect(new TextDecoder().decode(assembler.getFullMetadata()!)).toBe("metadata-content");
    });
    test("reassembles multiple pieces out of order", () => {
        // 16KiB is the default piece size
        const first = new Uint8Array(16384).fill(1);
        const second = new Uint8Array(16384).fill(2);
        const last = new Uint8Array(100).fill(3); // Final piece
        const assembler = new MetadataReassembler(16384 * 2 + 100);
        // Add pieces out of order: 2, 0, 1
        assembler.addPiece(2, last);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.getProgress()).toBeCloseTo(33.3, 1);
        assembler.addPiece(0, first);
        expect(assembler.isComplete()).toBe(false);
        expect(assembler.addPiece(1, second)).toBe(true);
        const assembled = assembler.getFullMetadata()!;
        expect(assembled.length).toBe(16384 * 2 + 100);
        expect(assembled.slice(0, 16384)).toEqual(first);
        expect(assembled.slice(16384, 16384 * 2)).toEqual(second);
        expect(assembled.slice(16384 * 2)).toEqual(last);
    });
    test("returns null for incomplete metadata", () => {
        const assembler = new MetadataReassembler(20000);
        assembler.addPiece(0, new Uint8Array(16384));
        expect(assembler.getFullMetadata()).toBeNull();
    });
});

View File

@@ -0,0 +1,97 @@
/**
 * BitTorrent Protocol Structures (BEP 9 & 10)
 * This file defines the structures for the Extension Protocol and Metadata Extension.
 */
/**
 * Extension Protocol handshake payload (BEP 10): advertises which extensions
 * this client supports and the message IDs it assigns to them.
 */
export interface ExtensionHandshake {
  m: { [extensionName: string]: number }; // Supported extensions and their local IDs
  p?: number; // Local TCP port
  v?: string; // Client version string
  metadata_size?: number; // Total size of the info dictionary in bytes
}
/**
 * Metadata Extension message (BEP 9, ut_metadata): used to request and
 * deliver pieces of the info dictionary.
 */
export interface MetadataRequest {
  msg_type: number; // 0 for request, 1 for data, 2 for reject
  piece: number; // The piece index being requested
  total_size?: number; // Only for data messages
}
/**
 * Simulates the reassembly of metadata pieces.
 * In a real scenario, these pieces would come from different peers over individual TCP packets.
 */
export class MetadataReassembler {
  private pieces: Map<number, Uint8Array> = new Map();
  private totalSize: number;
  private pieceSize: number = 16384; // 16KiB

  constructor(totalSize: number) {
    this.totalSize = totalSize;
  }

  /** Number of pieces the metadata splits into. */
  private totalPieceCount(): number {
    return Math.ceil(this.totalSize / this.pieceSize);
  }

  /** Stores a piece (replacing any previous copy) and reports completeness. */
  public addPiece(index: number, data: Uint8Array): boolean {
    this.pieces.set(index, data);
    return this.isComplete();
  }

  public isComplete(): boolean {
    const totalPieces = this.totalPieceCount();
    if (this.pieces.size < totalPieces) return false;
    // Require every expected index to be present, not just a matching count,
    // so a stray out-of-range index cannot mark us complete.
    for (let i = 0; i < totalPieces; i++) {
      if (!this.pieces.has(i)) return false;
    }
    return true;
  }

  /** Returns the reassembled metadata, or null while pieces are missing. */
  public getFullMetadata(): Uint8Array | null {
    if (!this.isComplete()) return null;
    const fullData = new Uint8Array(this.totalSize);
    const totalPieces = this.totalPieceCount();
    for (let index = 0; index < totalPieces; index++) {
      // Each piece occupies a fixed slot; only the final piece may be short.
      fullData.set(this.pieces.get(index)!, index * this.pieceSize);
    }
    return fullData;
  }

  public getProgress(): number {
    const totalPieces = this.totalPieceCount();
    if (totalPieces === 0) return 100; // zero-size metadata is trivially complete (avoids 0/0 NaN)
    return (this.pieces.size / totalPieces) * 100;
  }
}
/**
 * Tracks which pieces we have and provides bitfield generation.
 * Piece 0 maps to the most significant bit of byte 0.
 */
export class Bitfield {
  private bits: Uint8Array;
  public totalPieces: number;

  constructor(totalPieces: number) {
    this.totalPieces = totalPieces;
    this.bits = new Uint8Array(Math.ceil(totalPieces / 8));
  }

  /** Computes the (byte index, bit mask) pair addressing a piece. */
  private locate(index: number): [number, number] {
    return [Math.floor(index / 8), 1 << (7 - (index % 8))];
  }

  public set(index: number) {
    if (index >= this.totalPieces) return;
    const [byte, mask] = this.locate(index);
    this.bits[byte] |= mask;
  }

  public has(index: number): boolean {
    if (index >= this.totalPieces) return false;
    const [byte, mask] = this.locate(index);
    return (this.bits[byte] & mask) !== 0;
  }

  public toBuffer(): Uint8Array {
    return this.bits;
  }

  public fromBuffer(buffer: Uint8Array) {
    this.bits = new Uint8Array(buffer);
  }
}

View File

@@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "Node",
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"baseUrl": ".",
"paths": {
"@torrent-client/shared": [
"./src"
]
}
},
"include": [
"src/**/*"
],
"exclude": [
"**/*.test.ts"
]
}