Initial commit

packages/shared/src/bencode.js (Normal file, 137 lines)
@@ -0,0 +1,137 @@
/**
 * Bencode Decoder
 * Implements the BitTorrent Bencode format:
 * Strings: <length>:<contents>
 * Integers: i<integer>e
 * Lists: l<contents>e
 * Dictionaries: d<contents>e
 */
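// Worked examples, added for illustration (these mirror the cases in bencode.test.js):
// "4:spam"                 decodes to "spam"
// "i42e"                   decodes to 42
// "l4:spami42ee"           decodes to ["spam", 42]
// "d3:bar4:spam3:fooi42ee" decodes to { bar: "spam", foo: 42 }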
export class BencodeDecoder {
  buffer;
  offset = 0;
  getOffset() { return this.offset; }
  constructor(data) {
    if (typeof data === 'string') {
      this.buffer = new TextEncoder().encode(data);
    }
    else {
      this.buffer = data;
    }
  }
  decode() {
    const char = String.fromCharCode(this.buffer[this.offset]);
    if (char === 'i') {
      return this.decodeInteger();
    }
    else if (char === 'l') {
      return this.decodeList();
    }
    else if (char === 'd') {
      return this.decodeDictionary();
    }
    else if (char >= '0' && char <= '9') {
      return this.decodeString();
    }
    throw new Error(`Unexpected character at offset ${this.offset}: ${char}`);
  }
  decodeInteger() {
    this.offset++; // skip 'i'
    const end = this.buffer.indexOf(101, this.offset); // 'e' is 101
    if (end === -1)
      throw new Error('Unterminated integer');
    const raw = new TextDecoder().decode(this.buffer.subarray(this.offset, end));
    this.offset = end + 1;
    return parseInt(raw, 10);
  }
  decodeString() {
    const colonIndex = this.buffer.indexOf(58, this.offset); // ':' is 58
    if (colonIndex === -1)
      throw new Error('Invalid string length');
    const lengthStr = new TextDecoder().decode(this.buffer.subarray(this.offset, colonIndex));
    const length = parseInt(lengthStr, 10);
    this.offset = colonIndex + 1;
    const data = this.buffer.subarray(this.offset, this.offset + length);
    this.offset += length;
    // Try to decode as UTF-8, if it fails or looks binary, keep as Uint8Array
    try {
      const decoded = new TextDecoder('utf-8', { fatal: true }).decode(data);
      // Heuristic: if it contains non-printable characters or looks like a hash, keep as binary
      // But for simplicity in this learning exercise, we'll try to return string where possible
      return decoded;
    }
    catch {
      return data;
    }
  }
  decodeList() {
    this.offset++; // skip 'l'
    const list = [];
    while (this.buffer[this.offset] !== 101) { // 'e'
      list.push(this.decode());
    }
    this.offset++; // skip 'e'
    return list;
  }
  decodeDictionary() {
    this.offset++; // skip 'd'
    const dict = {};
    while (this.buffer[this.offset] !== 101) { // 'e'
      const key = this.decode();
      if (typeof key !== 'string')
        throw new Error('Dictionary keys must be strings');
      const value = this.decode();
      dict[key] = value;
    }
    this.offset++; // skip 'e'
    return dict;
  }
  static decode(data) {
    return new BencodeDecoder(data).decode();
  }
}
export class BencodeEncoder {
  static encoder = new TextEncoder();
  static concatUint8Arrays(arrays) {
    const totalLength = arrays.reduce((acc, arr) => acc + arr.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const arr of arrays) {
      result.set(arr, offset);
      offset += arr.length;
    }
    return result;
  }
  static encode(value) {
    if (typeof value === 'string') {
      const bytes = this.encoder.encode(value);
      const prefix = this.encoder.encode(`${bytes.length}:`);
      return this.concatUint8Arrays([prefix, bytes]);
    }
    else if (typeof value === 'number') {
      return this.encoder.encode(`i${Math.floor(value)}e`);
    }
    else if (value instanceof Uint8Array) {
      const prefix = this.encoder.encode(`${value.length}:`);
      return this.concatUint8Arrays([prefix, value]);
    }
    else if (Array.isArray(value)) {
      const parts = [this.encoder.encode('l')];
      for (const item of value) {
        parts.push(BencodeEncoder.encode(item));
      }
      parts.push(this.encoder.encode('e'));
      return this.concatUint8Arrays(parts);
    }
    else if (typeof value === 'object' && value !== null) {
      const parts = [this.encoder.encode('d')];
      const keys = Object.keys(value).sort();
      for (const key of keys) {
        parts.push(BencodeEncoder.encode(key));
        parts.push(BencodeEncoder.encode(value[key]));
      }
      parts.push(this.encoder.encode('e'));
      return this.concatUint8Arrays(parts);
    }
    throw new Error(`Unsupported value type for bencoding: ${typeof value}`);
  }
}

packages/shared/src/bencode.test.js (Normal file, 37 lines)
@@ -0,0 +1,37 @@
import { describe, it, expect } from 'vitest';
import { BencodeDecoder } from './bencode';
describe('BencodeDecoder', () => {
  it('should decode integers', () => {
    expect(BencodeDecoder.decode('i42e')).toBe(42);
    expect(BencodeDecoder.decode('i-42e')).toBe(-42);
    expect(BencodeDecoder.decode('i0e')).toBe(0);
  });
  it('should decode strings', () => {
    expect(BencodeDecoder.decode('4:spam')).toBe('spam');
    expect(BencodeDecoder.decode('0:')).toBe('');
  });
  it('should decode lists', () => {
    expect(BencodeDecoder.decode('l4:spami42ee')).toEqual(['spam', 42]);
    expect(BencodeDecoder.decode('le')).toEqual([]);
  });
  it('should decode dictionaries', () => {
    expect(BencodeDecoder.decode('d3:bar4:spam3:fooi42ee')).toEqual({
      bar: 'spam',
      foo: 42
    });
    expect(BencodeDecoder.decode('de')).toEqual({});
  });
  it('should handle nested structures', () => {
    const encoded = 'd4:listl4:spami42ee3:subd3:key5:valueee';
    const decoded = BencodeDecoder.decode(encoded);
    expect(decoded).toEqual({
      list: ['spam', 42],
      sub: { key: 'value' }
    });
  });
  it('should throw error on invalid bencode', () => {
    expect(() => BencodeDecoder.decode('x')).toThrow();
    expect(() => BencodeDecoder.decode('i42')).toThrow();
    expect(() => BencodeDecoder.decode('l4:spam')).toThrow();
  });
});

packages/shared/src/bencode.test.ts (Normal file, 43 lines)
@@ -0,0 +1,43 @@
import { describe, it, expect } from 'vitest';
import { BencodeDecoder } from './bencode';

describe('BencodeDecoder', () => {
  it('should decode integers', () => {
    expect(BencodeDecoder.decode('i42e')).toBe(42);
    expect(BencodeDecoder.decode('i-42e')).toBe(-42);
    expect(BencodeDecoder.decode('i0e')).toBe(0);
  });

  it('should decode strings', () => {
    expect(BencodeDecoder.decode('4:spam')).toBe('spam');
    expect(BencodeDecoder.decode('0:')).toBe('');
  });

  it('should decode lists', () => {
    expect(BencodeDecoder.decode('l4:spami42ee')).toEqual(['spam', 42]);
    expect(BencodeDecoder.decode('le')).toEqual([]);
  });

  it('should decode dictionaries', () => {
    expect(BencodeDecoder.decode('d3:bar4:spam3:fooi42ee')).toEqual({
      bar: 'spam',
      foo: 42
    });
    expect(BencodeDecoder.decode('de')).toEqual({});
  });

  it('should handle nested structures', () => {
    const encoded = 'd4:listl4:spami42ee3:subd3:key5:valueee';
    const decoded = BencodeDecoder.decode(encoded);
    expect(decoded).toEqual({
      list: ['spam', 42],
      sub: { key: 'value' }
    });
  });

  it('should throw error on invalid bencode', () => {
    expect(() => BencodeDecoder.decode('x')).toThrow();
    expect(() => BencodeDecoder.decode('i42')).toThrow();
    expect(() => BencodeDecoder.decode('l4:spam')).toThrow();
  });
});

packages/shared/src/bencode.ts (Normal file, 148 lines)
@@ -0,0 +1,148 @@
/**
 * Bencode Decoder
 * Implements the BitTorrent Bencode format:
 * Strings: <length>:<contents>
 * Integers: i<integer>e
 * Lists: l<contents>e
 * Dictionaries: d<contents>e
 */

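// Round-trip usage sketch (added for illustration; values mirror the cases in bencode.test.ts):
// const bytes = BencodeEncoder.encode({ bar: 'spam', foo: 42 }); // bytes for "d3:bar4:spam3:fooi42ee"
// BencodeDecoder.decode(bytes);                                  // { bar: 'spam', foo: 42 }
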
export type BencodeValue = string | number | BencodeValue[] | { [key: string]: BencodeValue } | Uint8Array;

export class BencodeDecoder {
  private buffer: Uint8Array;
  private offset: number = 0;
  public getOffset(): number { return this.offset; }

  constructor(data: Uint8Array | string) {
    if (typeof data === 'string') {
      this.buffer = new TextEncoder().encode(data);
    } else {
      this.buffer = data;
    }
  }

  public decode(): BencodeValue {
    const char = String.fromCharCode(this.buffer[this.offset]);

    if (char === 'i') {
      return this.decodeInteger();
    } else if (char === 'l') {
      return this.decodeList();
    } else if (char === 'd') {
      return this.decodeDictionary();
    } else if (char >= '0' && char <= '9') {
      return this.decodeString();
    }

    throw new Error(`Unexpected character at offset ${this.offset}: ${char}`);
  }

  private decodeInteger(): number {
    this.offset++; // skip 'i'
    const end = this.buffer.indexOf(101, this.offset); // 'e' is 101
    if (end === -1) throw new Error('Unterminated integer');

    const raw = new TextDecoder().decode(this.buffer.subarray(this.offset, end));
    this.offset = end + 1;
    return parseInt(raw, 10);
  }

  private decodeString(): Uint8Array | string {
    const colonIndex = this.buffer.indexOf(58, this.offset); // ':' is 58
    if (colonIndex === -1) throw new Error('Invalid string length');

    const lengthStr = new TextDecoder().decode(this.buffer.subarray(this.offset, colonIndex));
    const length = parseInt(lengthStr, 10);
    this.offset = colonIndex + 1;

    const data = this.buffer.subarray(this.offset, this.offset + length);
    this.offset += length;

    // Try to decode as UTF-8, if it fails or looks binary, keep as Uint8Array
    try {
      const decoded = new TextDecoder('utf-8', { fatal: true }).decode(data);
      // Heuristic: if it contains non-printable characters or looks like a hash, keep as binary
      // But for simplicity in this learning exercise, we'll try to return string where possible
      return decoded;
    } catch {
      return data;
    }
  }

  private decodeList(): BencodeValue[] {
    this.offset++; // skip 'l'
    const list: BencodeValue[] = [];

    while (this.buffer[this.offset] !== 101) { // 'e'
      list.push(this.decode());
    }

    this.offset++; // skip 'e'
    return list;
  }

  private decodeDictionary(): { [key: string]: BencodeValue } {
    this.offset++; // skip 'd'
    const dict: { [key: string]: BencodeValue } = {};

    while (this.buffer[this.offset] !== 101) { // 'e'
      const key = this.decode();
      if (typeof key !== 'string') throw new Error('Dictionary keys must be strings');
      const value = this.decode();
      dict[key] = value;
    }

    this.offset++; // skip 'e'
    return dict;
  }

  public static decode(data: Uint8Array | string): BencodeValue {
    return new BencodeDecoder(data).decode();
  }
}

export class BencodeEncoder {
  private static encoder = new TextEncoder();

  private static concatUint8Arrays(arrays: Uint8Array[]): Uint8Array {
    const totalLength = arrays.reduce((acc, arr) => acc + arr.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const arr of arrays) {
      result.set(arr, offset);
      offset += arr.length;
    }
    return result;
  }

  public static encode(value: BencodeValue): Uint8Array {
    if (typeof value === 'string') {
      const bytes = this.encoder.encode(value);
      const prefix = this.encoder.encode(`${bytes.length}:`);
      return this.concatUint8Arrays([prefix as Uint8Array, bytes as Uint8Array]);
    } else if (typeof value === 'number') {
      return this.encoder.encode(`i${Math.floor(value)}e`);
    } else if (value instanceof Uint8Array) {
      const prefix = this.encoder.encode(`${value.length}:`);
      return this.concatUint8Arrays([prefix as Uint8Array, value as Uint8Array]);
    } else if (Array.isArray(value)) {
      const parts: Uint8Array[] = [this.encoder.encode('l') as Uint8Array];
      for (const item of value) {
        parts.push(BencodeEncoder.encode(item) as Uint8Array);
      }
      parts.push(this.encoder.encode('e') as Uint8Array);
      return this.concatUint8Arrays(parts);
    } else if (typeof value === 'object' && value !== null) {
      const parts: Uint8Array[] = [this.encoder.encode('d') as Uint8Array];
      const keys = Object.keys(value).sort();
      for (const key of keys) {
        parts.push(BencodeEncoder.encode(key) as Uint8Array);
        parts.push(BencodeEncoder.encode(value[key as string]) as Uint8Array);
      }
      parts.push(this.encoder.encode('e') as Uint8Array);
      return this.concatUint8Arrays(parts);
    }
    throw new Error(`Unsupported value type for bencoding: ${typeof value}`);
  }
}

packages/shared/src/index.js (Normal file, 3 lines)
@@ -0,0 +1,3 @@
export * from './magnetParser';
export * from './bencode';
export * from './protocol';

packages/shared/src/index.ts (Normal file, 3 lines)
@@ -0,0 +1,3 @@
export * from './magnetParser';
export * from './bencode';
export * from './protocol';

packages/shared/src/magnetParser.js (Normal file, 70 lines)
@@ -0,0 +1,70 @@
export function parseMagnetURI(uri) {
  if (!uri.startsWith('magnet:?')) {
    return null;
  }
  const result = {
    tr: [],
    kt: [],
    ws: [],
    so: [],
    "x.pe": [],
  };
  const params = uri.substring(8).split('&');
  for (const param of params) {
    const [key, value] = param.split('=').map(decodeURIComponent);
    if (!key || !value)
      continue;
    switch (key) {
      case 'xt':
        result.xt = value;
        if (value.startsWith('urn:btih:')) {
          result.hash = value.substring(9);
        }
        break;
      case 'dn':
        result.dn = value;
        break;
      case 'tr':
        result.tr.push(value);
        break;
      case 'xl':
        result.xl = parseInt(value, 10);
        break;
      case 'as':
        result.as = value;
        break;
      case 'xs':
        result.xs = value;
        break;
      case 'kt':
        result.kt?.push(value);
        break;
      case 'ws':
        result.ws?.push(value);
        break;
      case 'mt':
        result.mt = value;
        break;
      case 'so':
        result.so?.push(value);
        break;
      case 'x.pe':
        result["x.pe"]?.push(value);
        break;
      default:
        // Handle potentially multiple values for unknown keys
        if (result[key]) {
          if (Array.isArray(result[key])) {
            result[key].push(value);
          }
          else {
            result[key] = [result[key], value];
          }
        }
        else {
          result[key] = value;
        }
    }
  }
  return result;
}

packages/shared/src/magnetParser.test.js (Normal file, 52 lines)
@@ -0,0 +1,52 @@
import { describe, it, expect } from 'vitest';
import { parseMagnetURI } from './magnetParser';
describe('magnetParser', () => {
  it('should parse a standard magnet link correctly', () => {
    const uri = 'magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash%20by%20Neal%20Stephenson%20EPUB&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337';
    const result = parseMagnetURI(uri);
    expect(result).not.toBeNull();
    expect(result?.hash).toBe('A18230D43BDA105BE7DEF84CB711859018AAA92D');
    expect(result?.dn).toBe('Snow Crash by Neal Stephenson EPUB');
    expect(result?.tr).toContain('udp://tracker.opentrackr.org:1337');
  });
  it('should handle multiple trackers', () => {
    const uri = 'magnet:?xt=urn:btih:hash&tr=tracker1&tr=tracker2';
    const result = parseMagnetURI(uri);
    expect(result?.tr).toHaveLength(2);
    expect(result?.tr).toContain('tracker1');
    expect(result?.tr).toContain('tracker2');
  });
  it('should parse file length (xl)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&xl=1024';
    const result = parseMagnetURI(uri);
    expect(result?.xl).toBe(1024);
  });
  it('should parse acceptable sources (as)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&as=http://example.com/file';
    const result = parseMagnetURI(uri);
    expect(result?.as).toBe('http://example.com/file');
  });
  it('should parse keywords (kt)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&kt=ebook&kt=stephenson';
    const result = parseMagnetURI(uri);
    expect(result?.kt).toContain('ebook');
    expect(result?.kt).toContain('stephenson');
  });
  it('should return null for invalid schemes', () => {
    const uri = 'http://example.com';
    const result = parseMagnetURI(uri);
    expect(result).toBeNull();
  });
  it('should handle unknown parameters', () => {
    const uri = 'magnet:?xt=urn:btih:hash&foo=bar';
    const result = parseMagnetURI(uri);
    expect(result?.foo).toBe('bar');
  });
  it('should handle multiple unknown parameters with the same key', () => {
    const uri = 'magnet:?xt=urn:btih:hash&foo=bar&foo=baz';
    const result = parseMagnetURI(uri);
    expect(Array.isArray(result?.foo)).toBe(true);
    expect(result?.foo).toContain('bar');
    expect(result?.foo).toContain('baz');
  });
});

packages/shared/src/magnetParser.test.ts (Normal file, 68 lines)
@@ -0,0 +1,68 @@
import { describe, it, expect } from 'vitest';
import { parseMagnetURI } from './magnetParser';

describe('magnetParser', () => {
  it('should parse a standard magnet link correctly', () => {
    const uri = 'magnet:?xt=urn:btih:A18230D43BDA105BE7DEF84CB711859018AAA92D&dn=Snow%20Crash%20by%20Neal%20Stephenson%20EPUB&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337';
    const result = parseMagnetURI(uri);

    expect(result).not.toBeNull();
    expect(result?.hash).toBe('A18230D43BDA105BE7DEF84CB711859018AAA92D');
    expect(result?.dn).toBe('Snow Crash by Neal Stephenson EPUB');
    expect(result?.tr).toContain('udp://tracker.opentrackr.org:1337');
  });

  it('should handle multiple trackers', () => {
    const uri = 'magnet:?xt=urn:btih:hash&tr=tracker1&tr=tracker2';
    const result = parseMagnetURI(uri);

    expect(result?.tr).toHaveLength(2);
    expect(result?.tr).toContain('tracker1');
    expect(result?.tr).toContain('tracker2');
  });

  it('should parse file length (xl)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&xl=1024';
    const result = parseMagnetURI(uri);

    expect(result?.xl).toBe(1024);
  });

  it('should parse acceptable sources (as)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&as=http://example.com/file';
    const result = parseMagnetURI(uri);

    expect(result?.as).toBe('http://example.com/file');
  });

  it('should parse keywords (kt)', () => {
    const uri = 'magnet:?xt=urn:btih:hash&kt=ebook&kt=stephenson';
    const result = parseMagnetURI(uri);

    expect(result?.kt).toContain('ebook');
    expect(result?.kt).toContain('stephenson');
  });

  it('should return null for invalid schemes', () => {
    const uri = 'http://example.com';
    const result = parseMagnetURI(uri);

    expect(result).toBeNull();
  });

  it('should handle unknown parameters', () => {
    const uri = 'magnet:?xt=urn:btih:hash&foo=bar';
    const result = parseMagnetURI(uri);

    expect(result?.foo).toBe('bar');
  });

  it('should handle multiple unknown parameters with the same key', () => {
    const uri = 'magnet:?xt=urn:btih:hash&foo=bar&foo=baz';
    const result = parseMagnetURI(uri);

    expect(Array.isArray(result?.foo)).toBe(true);
    expect(result?.foo).toContain('bar');
    expect(result?.foo).toContain('baz');
  });
});

packages/shared/src/magnetParser.ts (Normal file, 88 lines)
@@ -0,0 +1,88 @@
export interface MagnetData {
  xt?: string;
  hash?: string;
  dn?: string;
  tr: string[];
  xl?: number;
  as?: string;
  xs?: string;
  kt?: string[];
  ws?: string[];
  mt?: string;
  so?: string[];
  "x.pe"?: string[];
  [key: string]: string | string[] | number | undefined;
}

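// Example, added for illustration (mirrors magnetParser.test.ts):
// parseMagnetURI('magnet:?xt=urn:btih:hash&tr=tracker1&tr=tracker2')
//   -> { xt: 'urn:btih:hash', hash: 'hash', tr: ['tracker1', 'tracker2'], kt: [], ws: [], so: [], 'x.pe': [] }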
export function parseMagnetURI(uri: string): MagnetData | null {
  if (!uri.startsWith('magnet:?')) {
    return null;
  }

  const result: MagnetData = {
    tr: [],
    kt: [],
    ws: [],
    so: [],
    "x.pe": [],
  };

  const params = uri.substring(8).split('&');

  for (const param of params) {
    const [key, value] = param.split('=').map(decodeURIComponent);
    if (!key || !value) continue;

    switch (key) {
      case 'xt':
        result.xt = value;
        if (value.startsWith('urn:btih:')) {
          result.hash = value.substring(9);
        }
        break;
      case 'dn':
        result.dn = value;
        break;
      case 'tr':
        result.tr.push(value);
        break;
      case 'xl':
        result.xl = parseInt(value, 10);
        break;
      case 'as':
        result.as = value;
        break;
      case 'xs':
        result.xs = value;
        break;
      case 'kt':
        result.kt?.push(value);
        break;
      case 'ws':
        result.ws?.push(value);
        break;
      case 'mt':
        result.mt = value;
        break;
      case 'so':
        result.so?.push(value);
        break;
      case 'x.pe':
        result["x.pe"]?.push(value);
        break;
      default:
        // Handle potentially multiple values for unknown keys
        if (result[key]) {
          if (Array.isArray(result[key])) {
            (result[key] as string[]).push(value);
          } else {
            result[key] = [result[key] as string, value];
          }
        } else {
          result[key] = value;
        }
    }
  }

  return result;
}

packages/shared/src/protocol.js (Normal file, 72 lines)
@@ -0,0 +1,72 @@
/**
 * BitTorrent Protocol Structures (BEP 9 & 10)
 * This file defines the structures for the Extension Protocol and Metadata Extension.
 */
/**
 * Simulates the reassembly of metadata pieces.
 * In a real scenario, these pieces would come from different peers over individual TCP packets.
 */
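// Example, added for illustration (mirrors protocol.test.js): for a metadata size of
// 16384 * 2 + 100 bytes there are ceil(32868 / 16384) = 3 pieces; addPiece() returns true
// once all three have been added, regardless of arrival order.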
export class MetadataReassembler {
  pieces = new Map();
  totalSize;
  pieceSize = 16384; // 16KiB
  constructor(totalSize) {
    this.totalSize = totalSize;
  }
  addPiece(index, data) {
    this.pieces.set(index, data);
    return this.isComplete();
  }
  isComplete() {
    const totalPieces = Math.ceil(this.totalSize / this.pieceSize);
    return this.pieces.size === totalPieces;
  }
  getFullMetadata() {
    if (!this.isComplete())
      return null;
    const fullData = new Uint8Array(this.totalSize);
    const sortedIndices = Array.from(this.pieces.keys()).sort((a, b) => a - b);
    let offset = 0;
    for (const index of sortedIndices) {
      const piece = this.pieces.get(index);
      fullData.set(piece, offset);
      offset += piece.length;
    }
    return fullData;
  }
  getProgress() {
    const totalPieces = Math.ceil(this.totalSize / this.pieceSize);
    return (this.pieces.size / totalPieces) * 100;
  }
}
/**
 * Tracks which pieces we have and provides bitfield generation.
 */
export class Bitfield {
  bits;
  totalPieces;
  constructor(totalPieces) {
    this.totalPieces = totalPieces;
    this.bits = new Uint8Array(Math.ceil(totalPieces / 8));
  }
  set(index) {
    if (index >= this.totalPieces)
      return;
    const byteIndex = Math.floor(index / 8);
    const bitIndex = 7 - (index % 8);
    this.bits[byteIndex] |= (1 << bitIndex);
  }
  has(index) {
    if (index >= this.totalPieces)
      return false;
    const byteIndex = Math.floor(index / 8);
    const bitIndex = 7 - (index % 8);
    return (this.bits[byteIndex] & (1 << bitIndex)) !== 0;
  }
  toBuffer() {
    return this.bits;
  }
  fromBuffer(buffer) {
    this.bits = new Uint8Array(buffer);
  }
}

packages/shared/src/protocol.test.js (Normal file, 40 lines)
@@ -0,0 +1,40 @@
import { expect, test, describe } from "bun:test";
import { MetadataReassembler } from "./protocol";
describe("MetadataReassembler", () => {
  test("reassembles single piece correctly", () => {
    const data = new TextEncoder().encode("metadata-content");
    const reassembler = new MetadataReassembler(data.length);
    expect(reassembler.isComplete()).toBe(false);
    const complete = reassembler.addPiece(0, data);
    expect(complete).toBe(true);
    expect(reassembler.isComplete()).toBe(true);
    expect(reassembler.getProgress()).toBe(100);
    const result = reassembler.getFullMetadata();
    expect(new TextDecoder().decode(result)).toBe("metadata-content");
  });
  test("reassembles multiple pieces out of order", () => {
    // 16KiB is the default piece size
    const p0 = new Uint8Array(16384).fill(1);
    const p1 = new Uint8Array(16384).fill(2);
    const p2 = new Uint8Array(100).fill(3); // Final piece
    const reassembler = new MetadataReassembler(16384 * 2 + 100);
    // Add pieces out of order: 2, 0, 1
    reassembler.addPiece(2, p2);
    expect(reassembler.isComplete()).toBe(false);
    expect(reassembler.getProgress()).toBeCloseTo(33.3, 1);
    reassembler.addPiece(0, p0);
    expect(reassembler.isComplete()).toBe(false);
    const complete = reassembler.addPiece(1, p1);
    expect(complete).toBe(true);
    const full = reassembler.getFullMetadata();
    expect(full.length).toBe(16384 * 2 + 100);
    expect(full.slice(0, 16384)).toEqual(p0);
    expect(full.slice(16384, 16384 * 2)).toEqual(p1);
    expect(full.slice(16384 * 2)).toEqual(p2);
  });
  test("returns null for incomplete metadata", () => {
    const reassembler = new MetadataReassembler(20000);
    reassembler.addPiece(0, new Uint8Array(16384));
    expect(reassembler.getFullMetadata()).toBeNull();
  });
});

packages/shared/src/protocol.test.ts (Normal file, 51 lines)
@@ -0,0 +1,51 @@
import { expect, test, describe } from "bun:test";
import { MetadataReassembler } from "./protocol";

describe("MetadataReassembler", () => {
  test("reassembles single piece correctly", () => {
    const data = new TextEncoder().encode("metadata-content");
    const reassembler = new MetadataReassembler(data.length);

    expect(reassembler.isComplete()).toBe(false);
    const complete = reassembler.addPiece(0, data);

    expect(complete).toBe(true);
    expect(reassembler.isComplete()).toBe(true);
    expect(reassembler.getProgress()).toBe(100);

    const result = reassembler.getFullMetadata();
    expect(new TextDecoder().decode(result!)).toBe("metadata-content");
  });

  test("reassembles multiple pieces out of order", () => {
    // 16KiB is the default piece size
    const p0 = new Uint8Array(16384).fill(1);
    const p1 = new Uint8Array(16384).fill(2);
    const p2 = new Uint8Array(100).fill(3); // Final piece

    const reassembler = new MetadataReassembler(16384 * 2 + 100);

    // Add pieces out of order: 2, 0, 1
    reassembler.addPiece(2, p2);
    expect(reassembler.isComplete()).toBe(false);
    expect(reassembler.getProgress()).toBeCloseTo(33.3, 1);

    reassembler.addPiece(0, p0);
    expect(reassembler.isComplete()).toBe(false);

    const complete = reassembler.addPiece(1, p1);
    expect(complete).toBe(true);

    const full = reassembler.getFullMetadata()!;
    expect(full.length).toBe(16384 * 2 + 100);
    expect(full.slice(0, 16384)).toEqual(p0);
    expect(full.slice(16384, 16384 * 2)).toEqual(p1);
    expect(full.slice(16384 * 2)).toEqual(p2);
  });

  test("returns null for incomplete metadata", () => {
    const reassembler = new MetadataReassembler(20000);
    reassembler.addPiece(0, new Uint8Array(16384));
    expect(reassembler.getFullMetadata()).toBeNull();
  });
});

packages/shared/src/protocol.ts (Normal file, 97 lines)
@@ -0,0 +1,97 @@
/**
 * BitTorrent Protocol Structures (BEP 9 & 10)
 * This file defines the structures for the Extension Protocol and Metadata Extension.
 */

export interface ExtensionHandshake {
  m: { [extensionName: string]: number }; // Supported extensions and their local IDs
  p?: number; // Local TCP port
  v?: string; // Client version string
  metadata_size?: number; // Total size of the info dictionary in bytes
}

export interface MetadataRequest {
  msg_type: number; // 0 for request, 1 for data, 2 for reject
  piece: number; // The piece index being requested
  total_size?: number; // Only for data messages
}
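
// Wire-format sketch, added for illustration (example values are hypothetical): both structures
// travel as bencoded dictionaries inside BEP 10 extended messages (BitTorrent message ID 20).
// ExtensionHandshake { m: { ut_metadata: 1 }, metadata_size: 31235 }
//   -> d1:md11:ut_metadatai1ee13:metadata_sizei31235ee
// MetadataRequest { msg_type: 0, piece: 0 }
//   -> d8:msg_typei0e5:piecei0ee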

/**
 * Simulates the reassembly of metadata pieces.
 * In a real scenario, these pieces would come from different peers over individual TCP packets.
 */
export class MetadataReassembler {
  private pieces: Map<number, Uint8Array> = new Map();
  private totalSize: number;
  private pieceSize: number = 16384; // 16KiB

  constructor(totalSize: number) {
    this.totalSize = totalSize;
  }

  public addPiece(index: number, data: Uint8Array): boolean {
    this.pieces.set(index, data);
    return this.isComplete();
  }

  public isComplete(): boolean {
    const totalPieces = Math.ceil(this.totalSize / this.pieceSize);
    return this.pieces.size === totalPieces;
  }

  public getFullMetadata(): Uint8Array | null {
    if (!this.isComplete()) return null;

    const fullData = new Uint8Array(this.totalSize);
    const sortedIndices = Array.from(this.pieces.keys()).sort((a, b) => a - b);

    let offset = 0;
    for (const index of sortedIndices) {
      const piece = this.pieces.get(index)!;
      fullData.set(piece, offset);
      offset += piece.length;
    }

    return fullData;
  }

  public getProgress(): number {
    const totalPieces = Math.ceil(this.totalSize / this.pieceSize);
    return (this.pieces.size / totalPieces) * 100;
  }
}

/**
 * Tracks which pieces we have and provides bitfield generation.
 */
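// Bit-layout note, added for clarity: piece 0 maps to the most significant bit of byte 0,
// matching the bitfield message layout in the core BitTorrent protocol (BEP 3).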
export class Bitfield {
  private bits: Uint8Array;
  public totalPieces: number;

  constructor(totalPieces: number) {
    this.totalPieces = totalPieces;
    this.bits = new Uint8Array(Math.ceil(totalPieces / 8));
  }

  public set(index: number) {
    if (index >= this.totalPieces) return;
    const byteIndex = Math.floor(index / 8);
    const bitIndex = 7 - (index % 8);
    this.bits[byteIndex] |= (1 << bitIndex);
  }

  public has(index: number): boolean {
    if (index >= this.totalPieces) return false;
    const byteIndex = Math.floor(index / 8);
    const bitIndex = 7 - (index % 8);
    return (this.bits[byteIndex] & (1 << bitIndex)) !== 0;
  }

  public toBuffer(): Uint8Array {
    return this.bits;
  }

  public fromBuffer(buffer: Uint8Array) {
    this.bits = new Uint8Array(buffer);
  }
}