// rspace-online/modules/rcred/credrank.ts
/**
* CredRank — power iteration (modified PageRank) on a contribution graph.
*
* Pure function, no side effects. Takes nodes + edges + config,
* returns stationary distribution (nodeId → raw cred score).
*
* Algorithm:
* 1. Build adjacency from edges (weighted directed graph)
* 2. Normalize outgoing weights → transition probabilities
* 3. Seed vector from contribution node weights
* 4. Power iteration: π' = (1-α) × M^T × π + α × seed
* 5. Return full distribution
*/
import type { CredNode, CredEdge, CredConfigDoc } from './schemas';
/** Maximum power-iteration steps before giving up on convergence. */
const MAX_ITERATIONS = 50;
/** Stop iterating once the L1 distance between successive iterates drops below this. */
const CONVERGENCE_THRESHOLD = 1e-6;
/**
 * Compute CredRank scores via power iteration (PageRank with a weighted,
 * seed-personalized teleport vector).
 *
 * @param nodes - Graph nodes; `contribution` nodes with positive weight form the seed.
 * @param edges - Weighted directed edges. Edges whose endpoints are not in `nodes`,
 *   or whose weight is not strictly positive (including NaN), are ignored.
 * @param config - Supplies `dampingFactor`, consumed here as the teleport
 *   probability α in π' = (1-α)·Mᵀ·π + α·seed.
 * @returns Map<nodeId, rawCredScore> — stationary distribution summing to ~1.0;
 *   empty map when `nodes` is empty.
 */
export function computeCredRank(
  nodes: CredNode[],
  edges: CredEdge[],
  config: CredConfigDoc,
): Map<string, number> {
  const n = nodes.length;
  if (n === 0) return new Map();

  // Map node id → dense index so vectors can live in Float64Arrays.
  const nodeIndex = new Map<string, number>();
  for (let i = 0; i < n; i++) {
    nodeIndex.set(nodes[i].id, i);
  }

  // Build adjacency: outgoing[i] = [(targetIndex, weight)].
  // Reject edges with unknown endpoints. Also reject non-positive (or NaN)
  // weights up front: a mixed-sign edge list whose total is still positive
  // would otherwise yield negative / >1 "probabilities" after normalization.
  const outgoing: Array<Array<[number, number]>> = Array.from({ length: n }, () => []);
  for (const edge of edges) {
    const fromIdx = nodeIndex.get(edge.from);
    const toIdx = nodeIndex.get(edge.to);
    if (fromIdx === undefined || toIdx === undefined) continue;
    if (!(edge.weight > 0)) continue; // `!(w > 0)` also filters NaN
    outgoing[fromIdx].push([toIdx, edge.weight]);
  }

  // Normalize each row to transition probabilities (row-stochastic).
  // Every kept weight is > 0, so a non-empty row always has a positive total.
  const transition: Array<Array<[number, number]>> = Array.from({ length: n }, () => []);
  for (let i = 0; i < n; i++) {
    const out = outgoing[i];
    if (out.length === 0) continue;
    const totalWeight = out.reduce((s, [, w]) => s + w, 0);
    for (const [target, weight] of out) {
      transition[i].push([target, weight / totalWeight]);
    }
  }

  // Seed vector: contribution nodes carry their configured weight.
  const seed = new Float64Array(n);
  let seedSum = 0;
  for (let i = 0; i < n; i++) {
    const node = nodes[i];
    if (node.type === 'contribution' && node.weight > 0) {
      seed[i] = node.weight;
      seedSum += node.weight;
    }
  }
  if (seedSum > 0) {
    // Normalize seed to a probability distribution.
    for (let i = 0; i < n; i++) seed[i] /= seedSum;
  } else {
    // No weighted contributions — fall back to a uniform seed.
    const uniform = 1 / n;
    for (let i = 0; i < n; i++) seed[i] = uniform;
  }

  // Initialize π uniformly.
  let pi = new Float64Array(n);
  pi.fill(1 / n);

  // NOTE(review): `dampingFactor` is consumed directly as the teleport
  // probability α. In classic PageRank terminology the damping factor d is the
  // *follow-links* probability (teleport = 1 - d) — confirm the config's
  // semantics match this usage before tuning.
  const alpha = config.dampingFactor;
  const follow = 1 - alpha; // hoisted out of the hot loops

  // Power iteration: π' = (1-α)·Mᵀ·π + α·seed.
  for (let iter = 0; iter < MAX_ITERATIONS; iter++) {
    const piNext = new Float64Array(n);

    // Total π mass on dangling nodes (no usable out-edges). Spread it
    // uniformly in one O(n) pass below instead of an O(n) inner loop per
    // dangling node (accidental O(n²) in the original).
    let danglingMass = 0;

    // Sparse matrix-vector multiply: piNext[j] += (1-α) · P[i→j] · π[i].
    for (let i = 0; i < n; i++) {
      const mass = pi[i];
      if (mass === 0) continue;
      const neighbors = transition[i];
      if (neighbors.length === 0) {
        danglingMass += mass;
      } else {
        for (const [j, prob] of neighbors) {
          piNext[j] += follow * prob * mass;
        }
      }
    }

    // Uniform dangling share plus seed-weighted teleportation.
    const danglingShare = (follow * danglingMass) / n;
    for (let i = 0; i < n; i++) {
      piNext[i] += danglingShare + alpha * seed[i];
    }

    // Converged when the L1 distance between iterates is negligible.
    let delta = 0;
    for (let i = 0; i < n; i++) delta += Math.abs(piNext[i] - pi[i]);
    pi = piNext;
    if (delta < CONVERGENCE_THRESHOLD) break;
  }

  // Re-key the dense vector by node id.
  const result = new Map<string, number>();
  for (let i = 0; i < n; i++) {
    result.set(nodes[i].id, pi[i]);
  }
  return result;
}