Skip to content

Commit e43da46

Browse files
committed
Misc bugfixes and improvements to probability storage
1 parent d75b31a commit e43da46

5 files changed

Lines changed: 233 additions & 156 deletions

File tree

client/app.js

Lines changed: 56 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import Modal from './design-system/components/modal/modal.js';
44
import { initializeHelpModal } from './src/shell/help.js';
55
import { loadConfig } from './src/shell/config.js';
66
import { createActivityLogger } from './src/shell/activity-logger.js';
7-
import { clamp, safeNumber } from './src/shared/math.js';
7+
import { clamp, safeNumber, roundToPercentages } from './src/shared/math.js';
88
import { formatProbability } from './src/shared/format.js';
99
import { buildDeviceDefinition } from './src/probability-lab/domain/devices.js';
1010
import simulateSingleTrials from './src/probability-lab/engine/simulate-single.js';
@@ -100,6 +100,15 @@ const sliderInstances = {
100100
},
101101
};
102102

103+
// Store authoritative float probabilities as source of truth (keyed by deviceKey)
104+
const floatProbabilities = {
105+
single: new Map(), // key: 'coin', 'die', etc. -> Array<number>
106+
two: {
107+
a: new Map(),
108+
b: new Map(),
109+
},
110+
};
111+
103112
// Store speed slider instance
104113
let speedSlider = null;
105114
let settingsModal = null;
@@ -537,8 +546,15 @@ function renderOutcomeSliders(
537546
deviceKey,
538547
labels,
539548
probabilities,
540-
onChange
549+
onChange,
550+
floatProbMap
541551
) {
552+
// Get or initialize authoritative float probabilities from persistent storage
553+
if (!floatProbMap.has(deviceKey)) {
554+
floatProbMap.set(deviceKey, [...probabilities]);
555+
}
556+
let currentFloatProbs = floatProbMap.get(deviceKey);
557+
542558
// Check if sliders already exist for this device
543559
const existingKeys = [];
544560
for (const key of instanceMap.keys()) {
@@ -549,11 +565,15 @@ function renderOutcomeSliders(
549565

550566
// If sliders exist, update their values instead of recreating
551567
if (existingKeys.length > 0) {
568+
// Update source of truth from incoming probabilities
569+
currentFloatProbs = [...probabilities];
570+
floatProbMap.set(deviceKey, currentFloatProbs);
571+
const percentages = roundToPercentages(currentFloatProbs);
552572
for (let i = 0; i < labels.length; i++) {
553573
const sliderKey = `${deviceKey}-${i}`;
554574
const slider = instanceMap.get(sliderKey);
555-
if (slider) {
556-
slider.setValue(Math.round(probabilities[i] * 100), null, false);
575+
if (slider && i < percentages.length) {
576+
slider.setValue(percentages[i], null, false);
557577
}
558578
}
559579
return;
@@ -563,6 +583,9 @@ function renderOutcomeSliders(
563583
container.innerHTML = '';
564584
container.classList.remove('pl-bias-options--custom');
565585

586+
// Calculate initial percentages using largest-remainder rounding
587+
const initialPercentages = roundToPercentages(currentFloatProbs);
588+
566589
for (let i = 0; i < labels.length; i++) {
567590
const sliderKey = `${deviceKey}-${i}`;
568591
const wrapper = document.createElement('div');
@@ -583,36 +606,35 @@ function renderOutcomeSliders(
583606
min: 0,
584607
max: 100,
585608
step: 1,
586-
value: Math.round(probabilities[i] * 100),
609+
value: initialPercentages[i],
587610
showInputs: true,
588611
onChange: (percentageValue) => {
589612
const probabilityValue = percentageValue / 100;
590613

591-
// Get current probabilities from all sliders
592-
const currentProbabilities = [];
593-
for (let j = 0; j < labels.length; j++) {
594-
const otherKey = `${deviceKey}-${j}`;
595-
const otherSlider = instanceMap.get(otherKey);
596-
if (otherSlider) {
597-
const otherValue = otherSlider.getValue();
598-
currentProbabilities[j] = otherValue / 100;
599-
} else {
600-
currentProbabilities[j] = probabilities[j];
601-
}
602-
}
603-
614+
// Read other probabilities from authoritative float source (not from sliders)
615+
const currentProbabilities = floatProbMap.get(deviceKey);
604616
const normalized = normalizeProbabilities(currentProbabilities, i, probabilityValue);
605617

606-
// Update all sliders with normalized values
618+
// Convert to integer percentages using largest-remainder rounding
619+
const percentages = roundToPercentages(normalized);
620+
621+
// Derive quantized float probabilities from the integer percentages
622+
// This ensures 0% displayed = 0.0 stored = impossible in simulation
623+
const quantized = percentages.map(p => p / 100);
624+
625+
// Update source of truth in persistent storage with quantized values
626+
floatProbMap.set(deviceKey, quantized);
627+
628+
// Update all sliders with the integer percentages
607629
for (let j = 0; j < labels.length; j++) {
608630
const otherKey = `${deviceKey}-${j}`;
609631
const otherSlider = instanceMap.get(otherKey);
610-
if (otherSlider) {
611-
otherSlider.setValue(Math.round(normalized[j] * 100), null, false);
632+
if (otherSlider && j < percentages.length) {
633+
otherSlider.setValue(percentages[j], null, false);
612634
}
613635
}
614636

615-
onChange(normalized);
637+
onChange(quantized);
616638
},
617639
});
618640

@@ -621,16 +643,21 @@ function renderOutcomeSliders(
621643
}
622644

623645
function renderBiasControls(container, device, values, onChange) {
624-
// Determine which instance map to use based on container
646+
// Determine which instance map and float prob map to use based on container
625647
let instanceMap;
648+
let floatProbMap;
626649
if (container === els.biasOptions) {
627650
instanceMap = sliderInstances.single;
651+
floatProbMap = floatProbabilities.single;
628652
} else if (container === els.biasOptionsA) {
629653
instanceMap = sliderInstances.two.a;
654+
floatProbMap = floatProbabilities.two.a;
630655
} else if (container === els.biasOptionsB) {
631656
instanceMap = sliderInstances.two.b;
657+
floatProbMap = floatProbabilities.two.b;
632658
} else {
633659
instanceMap = sliderInstances.single; // fallback
660+
floatProbMap = floatProbabilities.single;
634661
}
635662

636663
const deviceKey = device;
@@ -662,7 +689,8 @@ function renderBiasControls(container, device, values, onChange) {
662689
deviceKey,
663690
outcomes,
664691
probabilities,
665-
(normalized) => onChange('customProbabilities', normalized)
692+
(normalized) => onChange('customProbabilities', normalized),
693+
floatProbMap
666694
);
667695
return;
668696
}
@@ -678,7 +706,8 @@ function renderBiasControls(container, device, values, onChange) {
678706
deviceKey,
679707
labels,
680708
probabilities,
681-
(normalized) => onChange('coinProbabilities', normalized)
709+
(normalized) => onChange('coinProbabilities', normalized),
710+
floatProbMap
682711
);
683712
return;
684713
}
@@ -692,7 +721,8 @@ function renderBiasControls(container, device, values, onChange) {
692721
deviceKey,
693722
labels,
694723
probabilities,
695-
(normalized) => onChange('dieProbabilities', normalized)
724+
(normalized) => onChange('dieProbabilities', normalized),
725+
floatProbMap
696726
);
697727
return;
698728
}

client/src/probability-lab/domain/devices.js

Lines changed: 20 additions & 107 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,23 @@ const DEVICE_META = {
1212

1313
const MAX_CUSTOM_OUTCOMES = 50;
1414

15+
/**
 * Sanitizes and normalizes probability arrays.
 *
 * Negative and non-finite (NaN/Infinity) entries are treated as 0 so that a
 * single bad value cannot poison the sum and silently discard the whole
 * array. The remaining mass is rescaled so the result sums to 1.0. If the
 * input is not a non-empty array, or carries no positive mass, the fallback
 * is returned unchanged.
 *
 * @param {Array<number>} probs - Array of probabilities to sanitize
 * @param {Array<number>} fallback - Fallback array if probs is invalid
 * @returns {Array<number>} Normalized probabilities summing to 1.0
 */
function sanitizeProbabilities(probs, fallback) {
  // Guard against null/undefined/non-array input (original threw on .map).
  if (!Array.isArray(probs) || probs.length === 0) {
    return fallback;
  }
  // Clamp negatives to 0 and zero out NaN/Infinity before summing.
  const clamped = probs.map((p) => (Number.isFinite(p) && p > 0 ? p : 0));
  const sum = clamped.reduce((acc, p) => acc + p, 0);
  if (sum > 0) {
    return clamped.map((p) => p / sum);
  }
  return fallback;
}
31+
1532
function baseDefinition(kind, extra = {}) {
1633
const meta = DEVICE_META[kind] ?? {};
1734
return {
@@ -80,118 +97,14 @@ export function buildDeviceDefinition(config) {
8097
if (config.device === 'coin') {
8198
const labels = ['Heads', 'Tails'];
8299
let probabilities = config.coinProbabilities ?? [0.5, 0.5];
83-
84-
// Ensure all probabilities are clamped and normalized
85-
// First, normalize the input probabilities
86-
let sum = probabilities.reduce((acc, p) => acc + p, 0);
87-
if (sum <= 0) {
88-
probabilities = [0.5, 0.5];
89-
return baseDefinition('coin', { labels, probabilities, cdf: buildCdf(probabilities) });
90-
}
91-
92-
let normalized = probabilities.map((p) => p / sum);
93-
94-
// Clamp values that are out of bounds and redistribute excess
95-
let clamped = normalized.map((p) => clamp(p, 0.01, 0.99));
96-
let clampedSum = clamped.reduce((acc, p) => acc + p, 0);
97-
98-
// If clamping reduced the sum, redistribute the difference proportionally
99-
if (clampedSum < 1.0) {
100-
const excess = 1.0 - clampedSum;
101-
// Find values that can accept more probability (those below 0.99)
102-
const canAcceptMore = clamped.map((p, i) => p < 0.99 ? i : -1).filter(i => i >= 0);
103-
if (canAcceptMore.length > 0) {
104-
// Distribute excess proportionally among values that can accept more
105-
const weights = canAcceptMore.map(i => clamped[i]);
106-
const weightSum = weights.reduce((acc, w) => acc + w, 0);
107-
if (weightSum > 0) {
108-
canAcceptMore.forEach((idx, i) => {
109-
clamped[idx] = Math.min(0.99, clamped[idx] + (weights[i] / weightSum) * excess);
110-
});
111-
} else {
112-
// Equal distribution if weights are zero
113-
const perValue = excess / canAcceptMore.length;
114-
canAcceptMore.forEach(idx => {
115-
clamped[idx] = Math.min(0.99, clamped[idx] + perValue);
116-
});
117-
}
118-
}
119-
}
120-
121-
// Final normalization to ensure sum is exactly 1.0
122-
clampedSum = clamped.reduce((acc, p) => acc + p, 0);
123-
if (clampedSum > 0) {
124-
probabilities = clamped.map((p) => p / clampedSum);
125-
// Final clamp check (shouldn't be needed, but safety check)
126-
probabilities = probabilities.map((p) => clamp(p, 0.01, 0.99));
127-
const finalSum = probabilities.reduce((acc, p) => acc + p, 0);
128-
if (finalSum > 0) {
129-
probabilities = probabilities.map((p) => p / finalSum);
130-
}
131-
} else {
132-
probabilities = [0.5, 0.5];
133-
}
134-
100+
probabilities = sanitizeProbabilities(probabilities, [0.5, 0.5]);
135101
return baseDefinition('coin', { labels, probabilities, cdf: buildCdf(probabilities) });
136102
}
137103

138104
if (config.device === 'die') {
139105
const labels = ['1', '2', '3', '4', '5', '6'];
140-
let probabilities;
141-
142-
probabilities = config.dieProbabilities ?? [1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6];
143-
144-
// Ensure all probabilities are clamped and normalized
145-
// First, normalize the input probabilities
146-
let sum = probabilities.reduce((acc, p) => acc + p, 0);
147-
if (sum <= 0) {
148-
probabilities = [1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6];
149-
return baseDefinition('die', { labels, probabilities, cdf: buildCdf(probabilities) });
150-
}
151-
152-
let normalized = probabilities.map((p) => p / sum);
153-
154-
// Clamp values that are out of bounds and redistribute excess
155-
let clamped = normalized.map((p) => clamp(p, 0.01, 0.8));
156-
let clampedSum = clamped.reduce((acc, p) => acc + p, 0);
157-
158-
// If clamping reduced the sum, redistribute the difference proportionally
159-
if (clampedSum < 1.0) {
160-
const excess = 1.0 - clampedSum;
161-
// Find values that can accept more probability (those below 0.8)
162-
const canAcceptMore = clamped.map((p, i) => p < 0.8 ? i : -1).filter(i => i >= 0);
163-
if (canAcceptMore.length > 0) {
164-
// Distribute excess proportionally among values that can accept more
165-
const weights = canAcceptMore.map(i => clamped[i]);
166-
const weightSum = weights.reduce((acc, w) => acc + w, 0);
167-
if (weightSum > 0) {
168-
canAcceptMore.forEach((idx, i) => {
169-
clamped[idx] = Math.min(0.8, clamped[idx] + (weights[i] / weightSum) * excess);
170-
});
171-
} else {
172-
// Equal distribution if weights are zero
173-
const perValue = excess / canAcceptMore.length;
174-
canAcceptMore.forEach(idx => {
175-
clamped[idx] = Math.min(0.8, clamped[idx] + perValue);
176-
});
177-
}
178-
}
179-
}
180-
181-
// Final normalization to ensure sum is exactly 1.0
182-
clampedSum = clamped.reduce((acc, p) => acc + p, 0);
183-
if (clampedSum > 0) {
184-
probabilities = clamped.map((p) => p / clampedSum);
185-
// Final clamp check (shouldn't be needed, but safety check)
186-
probabilities = probabilities.map((p) => clamp(p, 0.01, 0.8));
187-
const finalSum = probabilities.reduce((acc, p) => acc + p, 0);
188-
if (finalSum > 0) {
189-
probabilities = probabilities.map((p) => p / finalSum);
190-
}
191-
} else {
192-
probabilities = [1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6];
193-
}
194-
106+
let probabilities = config.dieProbabilities ?? [1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6];
107+
probabilities = sanitizeProbabilities(probabilities, [1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6, 1 / 6]);
195108
return baseDefinition('die', { labels, probabilities, cdf: buildCdf(probabilities) });
196109
}
197110

client/src/shared/math.js

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,45 @@ export function safeNumber(value, fallback) {
1111
return Number.isFinite(parsed) ? parsed : fallback;
1212
}
1313

14+
/**
 * Converts float probabilities to integer percentages using largest-remainder
 * (Hamilton) rounding. For valid input (non-negative floats summing to 1.0)
 * the result is guaranteed to sum to exactly 100.
 *
 * Ties between equal fractional remainders are broken by ascending index
 * (Array.prototype.sort is stable per ES2019), so output is deterministic.
 * Negative and non-finite entries are treated as 0 so Math.floor cannot
 * produce NaN or negative percentages that corrupt the distribution step.
 *
 * @param {Array<number>} probabilities - Array of float probabilities (should sum to 1.0)
 * @returns {Array<number>} Array of integers (0-100) that sum to exactly 100
 */
export function roundToPercentages(probabilities) {
  if (!Array.isArray(probabilities) || probabilities.length === 0) {
    return [];
  }

  // Scale once; sanitize bad entries to 0 before any arithmetic.
  const scaled = probabilities.map((p) => (Number.isFinite(p) && p > 0 ? p * 100 : 0));

  // Step 1: integer part of each scaled probability.
  const floors = scaled.map((s) => Math.floor(s));
  const floorSum = floors.reduce((sum, f) => sum + f, 0);

  // Step 2: how many single points remain to reach 100.
  const shortfall = 100 - floorSum;
  if (shortfall <= 0) {
    // Already at (or above) 100 — only possible when the input over-sums.
    return floors;
  }

  // Step 3: rank indices by fractional remainder, largest first; stable sort
  // keeps equal remainders in ascending-index order.
  const byRemainder = scaled
    .map((s, index) => ({ index, remainder: s - floors[index] }))
    .sort((a, b) => b.remainder - a.remainder);

  // Step 4: hand out the missing points one at a time to the largest
  // remainders (capped by array length in case the input under-sums).
  const result = [...floors];
  for (let i = 0; i < shortfall && i < byRemainder.length; i++) {
    result[byRemainder[i].index]++;
  }

  return result;
}
55+

0 commit comments

Comments
 (0)