Where ideas percolate and thoughts brew

Two Paths to the Same Goal

About This Sketch

Two agents, one goal. The upper particle fights through open space, its path erratic, its energy depleting as it goes. The lower particle moves through a designed channel — same destination, less resistance, faster arrival. Neither has more "discipline." One has better architecture.

Algorithm

Two agents pursue the same goal marker on the right side of the canvas. The upper agent (willpower model) navigates via Perlin noise drift — its energy depletes over time, increasing erratic movement. The lower agent (environment design) moves through a pre-built channel that guides it directly to the goal with minimal friction. Both reach the goal, but the channel agent is faster, steadier, and costs less. They reset and repeat, illustrating the structural difference between the two models.

Pseudocode

SETUP:
  Initialize two agents at left side of canvas
  Draw channel for environment agent
  Randomize noise seeds

DRAW each frame:
  Move willpower agent via decaying-energy Perlin noise + forward bias
  Move environment agent via channel: gravity toward centerline + forward velocity
  Record trails for both agents
  When both reach goal: wait briefly, then reset
  Draw channel and flow arrows first, then trails, goal markers, agents, and labels

Source Code

// p5.js instance-mode sketch: two agents race toward the same goal each run.
// Upper "willpower" agent wanders via Perlin noise with decaying energy;
// lower "environment" agent rides a low-friction guiding channel.
// NOTE(review): assumes `new p5(sketch, ...)` and `getThemeColors()` are
// defined elsewhere on the page — confirm against the surrounding template.
let sketch = function(p) {
    let agentW, agentE;        // willpower agent / environment agent
    const GOAL_X = 370;        // x-coordinate both agents must reach
    const GOAL_Y_W = 90;       // goal (and lane center) y for the willpower agent
    const GOAL_Y_E = 210;      // goal (and channel centerline) y for the environment agent

    let wTrail = [];           // recent positions of the willpower agent (capped at 60)
    let eTrail = [];           // recent positions of the environment agent (capped at 60)
    let wPhase = 0;            // per-run random offset for willpower noise/pulse
    let ePhase = 0;            // per-run random offset for environment pulse
    let wFinished = false;     // willpower agent has reached the goal this run
    let eFinished = false;     // environment agent has reached the goal this run
    let resetTimer = 0;        // frames elapsed since BOTH agents finished
    const RESET_DELAY = 80;    // frames to pause before restarting the run

    // Put both agents back at the left edge and clear all per-run state.
    // Willpower agent starts at full energy (1.0); environment agent has no
    // energy field — its steadiness comes from the channel, not a budget.
    function resetAgents() {
        agentW = { x: 28, y: GOAL_Y_W, vx: 0, vy: 0, energy: 1.0 };
        agentE = { x: 28, y: GOAL_Y_E, vx: 0, vy: 0 };
        wTrail = [];
        eTrail = [];
        // New noise/pulse offsets each run so repeats look different.
        wPhase = p.random(100);
        ePhase = p.random(100);
        wFinished = false;
        eFinished = false;
        resetTimer = 0;
    }

    p.setup = function() {
        p.createCanvas(400, 300);
        // Fixed seed keeps the overall sequence of runs reproducible.
        p.randomSeed(42);
        resetAgents();
    };

    p.draw = function() {
        const colors = getThemeColors();
        p.background(...colors.bg);

        // Global animation clock derived from frame count.
        let t = p.frameCount * 0.018;

        // --- Channel (drawn first, underneath everything else) ---
        // Faint band marking the environment agent's guided path to the goal.
        p.noStroke();
        p.fill(...colors.accent1, 22);
        p.rect(20, GOAL_Y_E - 28, GOAL_X - 20, 56, 6);

        // Chevrons inside the channel, alpha-pulsed to suggest flow toward the goal.
        for (let ax = 50; ax < GOAL_X - 10; ax += 38) {
            let arrowAlpha = 28 + 10 * Math.sin(t * 1.5 - ax * 0.04);
            p.fill(...colors.accent1, arrowAlpha);
            p.noStroke();
            p.triangle(ax + 7, GOAL_Y_E, ax, GOAL_Y_E - 5, ax, GOAL_Y_E + 5);
        }

        // --- Willpower agent physics ---
        if (!wFinished) {
            // Energy drains each frame, floored at 0.1; lower energy maps to
            // a larger noise magnitude, i.e. more erratic motion late in the run.
            agentW.energy = Math.max(0.1, agentW.energy - 0.0015);
            let noiseMag = p.map(agentW.energy, 1, 0.1, 0.6, 2.8);
            // noise() is in [0,1]; scaling by TWO_PI * 2 lets the heading wrap
            // a full circle (and beyond) as the noise field varies.
            let angle = p.noise(wPhase + t * 0.7, agentW.y * 0.02) * p.TWO_PI * 2;
            // Noise-driven steering plus a constant +1.1 rightward bias toward the goal.
            agentW.vx += (Math.cos(angle) * noiseMag + 1.1) * 0.12;
            agentW.vy += Math.sin(angle) * noiseMag * 0.12;
            // Velocity damping (friction).
            agentW.vx *= 0.82;
            agentW.vy *= 0.82;
            agentW.x += agentW.vx;
            agentW.y += agentW.vy;
            // Keep the agent in the upper lane: y in [20, 160], band centered on GOAL_Y_W.
            agentW.y = p.constrain(agentW.y, 20, GOAL_Y_W * 2 - 20);
            agentW.x = p.max(agentW.x, 20);
            wTrail.push({ x: agentW.x, y: agentW.y });
            if (wTrail.length > 60) wTrail.shift();
            // Finished once within 12px of the goal's x.
            if (agentW.x >= GOAL_X - 12) wFinished = true;
        }

        // --- Environment agent physics ---
        if (!eFinished) {
            // Constant forward drive; spring force pulls y toward the channel centerline.
            agentE.vx += 1.3 * 0.14;
            agentE.vy += (GOAL_Y_E - agentE.y) * 0.06;
            // Slightly less x-damping (0.85 vs 0.82) than the willpower agent,
            // so the channel agent arrives faster.
            agentE.vx *= 0.85;
            agentE.vy *= 0.82;
            agentE.x += agentE.vx;
            agentE.y += agentE.vy;
            // Hard clamp to the channel's interior (±22px of centerline).
            agentE.y = p.constrain(agentE.y, GOAL_Y_E - 22, GOAL_Y_E + 22);
            agentE.x = p.max(agentE.x, 20);
            eTrail.push({ x: agentE.x, y: agentE.y });
            if (eTrail.length > 60) eTrail.shift();
            if (agentE.x >= GOAL_X - 12) eFinished = true;
        }

        // Once both agents arrive, pause RESET_DELAY frames, then restart.
        if (wFinished && eFinished) {
            resetTimer++;
            if (resetTimer > RESET_DELAY) resetAgents();
        }

        // --- Trails: oldest points faintest (alpha ramps with index) ---
        p.noStroke();
        for (let i = 0; i < wTrail.length; i++) {
            p.fill(...colors.accent3, (i / wTrail.length) * 70);
            p.circle(wTrail[i].x, wTrail[i].y, 3.5);
        }
        for (let i = 0; i < eTrail.length; i++) {
            p.fill(...colors.accent1, (i / eTrail.length) * 70);
            p.circle(eTrail[i].x, eTrail[i].y, 3.5);
        }

        // --- Goal markers: soft outer halo plus solid inner dot, one per lane ---
        p.fill(...colors.accent2, 60);
        p.circle(GOAL_X, GOAL_Y_W, 26);
        p.circle(GOAL_X, GOAL_Y_E, 26);
        p.fill(...colors.accent2, 160);
        p.circle(GOAL_X, GOAL_Y_W, 12);
        p.circle(GOAL_X, GOAL_Y_E, 12);

        // --- Agents (hidden once finished; their trails linger until reset) ---
        if (!wFinished) {
            // Size pulse plus energy-linked alpha: the agent visibly dims as it tires.
            let wPulse = 0.88 + 0.12 * Math.sin(t * 3 + wPhase);
            p.fill(...colors.accent3, 55 + agentW.energy * 80);
            p.noStroke();
            p.circle(agentW.x, agentW.y, 13 * wPulse);
            p.fill(...colors.accent3, 130 + agentW.energy * 80);
            p.circle(agentW.x, agentW.y, 7 * wPulse);
        }

        if (!eFinished) {
            // Gentler pulse and fixed alpha — steady by design.
            let ePulse = 0.92 + 0.08 * Math.sin(t * 2 + ePhase);
            p.fill(...colors.accent2, 90);
            p.noStroke();
            p.circle(agentE.x, agentE.y, 13 * ePulse);
            p.fill(...colors.accent2, 200);
            p.circle(agentE.x, agentE.y, 7 * ePulse);
        }

        // --- Labels ---
        p.noStroke();
        p.fill(...colors.accent3, 110);
        p.textAlign(p.LEFT);
        p.textSize(9);
        p.text('willpower model', 22, GOAL_Y_W - 36);
        p.fill(...colors.accent2, 140);
        p.text('environment design', 22, GOAL_Y_E - 36);
        p.fill(...colors.accent3, 55);
        p.textAlign(p.CENTER);
        p.textSize(8);
        p.text('same goal \u00b7 different architecture', 200, 292);
    };
};