bugfix/message-from-before-creation-was-send (#1295)
* FIX got too many messages

* FIX node method

* CLEAR npm cache

* UPDATE deps

* FIX missing event

* FIX e2e tests

* FIX tests
pubkey authored Nov 27, 2023
1 parent 190aeac commit a278986
Showing 16 changed files with 8,074 additions and 425 deletions.
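
For orientation, here is a minimal sketch of the regression this commit addresses, written against the public broadcast-channel API as documented in the package README (the channel name 'demo' and the demo() wrapper are illustrative and not taken from this commit): a message posted before a subscriber's channel exists must not be delivered to that late subscriber.

const { BroadcastChannel } = require('broadcast-channel');

async function demo() {
  const sender = new BroadcastChannel('demo');
  // Posted while no other channel for 'demo' exists yet.
  await sender.postMessage('posted before the subscriber was created');

  // Created after the message above was sent, so with the fix in this
  // commit it must never receive that message.
  const lateSubscriber = new BroadcastChannel('demo');
  lateSubscriber.onmessage = msg => console.log('received:', msg);

  // Only this message should reach the subscriber.
  await sender.postMessage('posted after the subscriber was created');

  await sender.close();
  await lateSubscriber.close();
}

demo();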
22 changes: 11 additions & 11 deletions .github/workflows/main.yml
@@ -32,7 +32,7 @@ jobs:
- name: Setup Node.js environment
uses: actions/[email protected]
with:
node-version: '20.9.0'
node-version-file: ".nvmrc"

# https://docs.github.com/en/free-pro-team@latest/actions/guides/caching-dependencies-to-speed-up-workflows
- name: Reuse npm cache folder
@@ -46,10 +46,10 @@
./node_modules
./test-electron/node_modules
# invalidate cache when any package.json changes
key: ${{ runner.os }}-npm-x2-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
key: ${{ runner.os }}-npm-x3-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
restore-keys: |
${{ runner.os }}-npm-x2-${{ env.cache-name }}-
${{ runner.os }}-npm-x2-
${{ runner.os }}-npm-x3-${{ env.cache-name }}-
${{ runner.os }}-npm-x3-
${{ runner.os }}-
# install
@@ -117,9 +117,9 @@ jobs:
./node_modules
./test-electron/node_modules
# invalidate cache when any package.json changes
key: ${{ runner.os }}-npm-test-node-x2-${{ matrix.node }}-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
key: ${{ runner.os }}-npm-test-node-x3-${{ matrix.node }}-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
restore-keys: |
${{ runner.os }}-npm-test-node-x2-${{ matrix.node }}-${{ env.cache-name }}-
${{ runner.os }}-npm-test-node-x3-${{ matrix.node }}-${{ env.cache-name }}-
${{ runner.os }}-npm-test-node
${{ runner.os }}-test-node
@@ -140,22 +140,22 @@
- name: Setup Node.js environment
uses: actions/[email protected]
with:
node-version: '20.9.0'
node-version-file: ".nvmrc"

# https://docs.github.com/en/free-pro-team@latest/actions/guides/caching-dependencies-to-speed-up-workflows
- name: Reuse npm cache folder
uses: actions/cache@v3
env:
cache-name: cache-node-modules
cache-name: cache-node-deno-modules
with:
path: |
~/.npm
./node_modules
./test-electron/node_modules
# invalidate cache when any package.json changes
key: ${{ runner.os }}-npm-test-deno-x2-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
key: ${{ runner.os }}-npm-test-deno-x3-${{ env.cache-name }}-${{ hashFiles('**/package.json') }}
restore-keys: |
${{ runner.os }}-npm-test-deno-x2-${{ env.cache-name }}-
${{ runner.os }}-npm-test-deno-x3-${{ env.cache-name }}-
${{ runner.os }}-npm-test-deno
${{ runner.os }}-test-deno
@@ -174,7 +174,7 @@
/home/runner/.cache/deno
# do not cache based on package.json because deno install randomly fails
# and it would then never succeed on the first run on dependency updateds
key: ${{ runner.os }}-deno-x2-
key: ${{ runner.os }}-deno-x3-

- uses: denoland/setup-deno@v1
with:
1 change: 1 addition & 0 deletions .nvmrc
@@ -0,0 +1 @@
20.10.0
7,990 changes: 7,815 additions & 175 deletions docs/e2e.js

Large diffs are not rendered by default.

62 changes: 27 additions & 35 deletions docs/iframe.js
@@ -238,18 +238,7 @@ function _startListening(channel) {

var listenerFn = function listenerFn(msgObj) {
channel._addEL[msgObj.type].forEach(function (listenerObject) {
/**
* Getting the current time in JavaScript has no good precision.
* So instead of only listening to events that happened 'after' the listener
* was added, we also listen to events that happened 100ms before it.
* This ensures that when another process, like a WebWorker, sends events
* we do not miss them out because their timestamp is a bit off compared to the main process.
* Not doing this would make messages missing when we send data directly after subscribing and awaiting a response.
* @link https://johnresig.com/blog/accuracy-of-javascript-time/
*/
var hundredMsInMicro = 100 * 1000;
var minMessageTime = listenerObject.time - hundredMsInMicro;
if (msgObj.time >= minMessageTime) {
if (msgObj.time >= listenerObject.time) {
listenerObject.fn(msgObj.data);
}
});
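
The hunk above drops the 100ms grace window and keeps only a strict comparison: a listener now receives a message only if the message's timestamp is at or after the moment the listener was registered. A condensed sketch of that dispatch step (the dispatchToListeners name is a hypothetical helper summarizing the code above, not part of the diff):

function dispatchToListeners(channel, msgObj) {
  channel._addEL[msgObj.type].forEach(function (listenerObject) {
    // Strict filter: only deliver messages stamped at or after the
    // listener's registration time.
    if (msgObj.time >= listenerObject.time) {
      listenerObject.fn(msgObj.data);
    }
  });
}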
@@ -1392,14 +1381,15 @@ var microSeconds = exports.microSeconds = _util.microSeconds;
var type = exports.type = 'native';
function create(channelName) {
var state = {
time: (0, _util.microSeconds)(),
messagesCallback: null,
bc: new BroadcastChannel(channelName),
subFns: [] // subscriberFunctions
};

state.bc.onmessage = function (msg) {
state.bc.onmessage = function (msgEvent) {
if (state.messagesCallback) {
state.messagesCallback(msg.data);
state.messagesCallback(msgEvent.data);
}
};
return state;
@@ -1455,7 +1445,7 @@ var NativeMethod = exports.NativeMethod = {
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.SimulateMethod = void 0;
exports.SimulateMethod = exports.SIMULATE_DELAY_TIME = void 0;
exports.averageResponseTime = averageResponseTime;
exports.canBeUsed = canBeUsed;
exports.close = close;
@@ -1470,30 +1460,36 @@ var type = exports.type = 'simulate';
var SIMULATE_CHANNELS = new Set();
function create(channelName) {
var state = {
time: microSeconds(),
name: channelName,
messagesCallback: null
};
console.log('created channel ' + state.counter);
SIMULATE_CHANNELS.add(state);
return state;
}
function close(channelState) {
SIMULATE_CHANNELS["delete"](channelState);
}
var SIMULATE_DELAY_TIME = exports.SIMULATE_DELAY_TIME = 5;
function postMessage(channelState, messageJson) {
return new Promise(function (res) {
return setTimeout(function () {
var channelArray = Array.from(SIMULATE_CHANNELS);
channelArray.filter(function (channel) {
return channel.name === channelState.name;
}).filter(function (channel) {
return channel !== channelState;
}).filter(function (channel) {
return !!channel.messagesCallback;
}).forEach(function (channel) {
return channel.messagesCallback(messageJson);
channelArray.forEach(function (channel) {
if (channel.name === channelState.name &&
// has same name
channel !== channelState &&
// not own channel
!!channel.messagesCallback &&
// has subscribers
channel.time < messageJson.time // channel not created after postMessage() call
) {
channel.messagesCallback(messageJson);
}
});
res();
}, 5);
}, SIMULATE_DELAY_TIME);
});
}
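
In the simulate method, the chained filters are folded into a single condition with one new clause: a channel whose creation time is later than the message's timestamp is skipped. Expressed as a standalone predicate (the shouldDeliver name is hypothetical; the logic mirrors the inline condition above):

function shouldDeliver(channel, senderState, messageJson) {
  return channel.name === senderState.name // same logical channel
    && channel !== senderState             // do not echo back to the sender
    && !!channel.messagesCallback          // only channels with a subscriber
    && channel.time < messageJson.time;    // skip channels created after postMessage()
}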
function onMessage(channelState, fn) {
@@ -1503,7 +1499,7 @@ function canBeUsed() {
return true;
}
function averageResponseTime() {
return 5;
return SIMULATE_DELAY_TIME;
}
var SimulateMethod = exports.SimulateMethod = {
create: create,
@@ -1596,25 +1592,21 @@ function randomToken() {
return Math.random().toString(36).substring(2);
}
var lastMs = 0;
var additional = 0;

/**
* returns the current time in micro-seconds,
* Returns the current unix time in micro-seconds,
* WARNING: This is a pseudo-function
* Performance.now is not reliable in webworkers, so we just make sure to never return the same time.
* This is enough in browsers, and this function will not be used in nodejs.
* The main reason for this hack is to ensure that BroadcastChannel behaves equal to production when it is used in fast-running unit tests.
*/
function microSeconds() {
var ms = Date.now();
if (ms === lastMs) {
additional++;
return ms * 1000 + additional;
} else {
lastMs = ms;
additional = 0;
return ms * 1000;
var ret = Date.now() * 1000; // milliseconds to microseconds
if (ret <= lastMs) {
ret = lastMs + 1;
}
lastMs = ret;
return ret;
}
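
The old implementation appended a per-millisecond counter to Date.now(); the new one simply remembers the last value it returned and bumps it by at least one microsecond, so repeated calls are strictly increasing even within the same millisecond. A small illustration of that property (microSecondsDemo is a local copy of the logic above for demonstration, not an export of the library):

let last = 0;
function microSecondsDemo() {
  let ret = Date.now() * 1000; // milliseconds -> microseconds
  if (ret <= last) {
    ret = last + 1; // never repeat or go backwards
  }
  last = ret;
  return ret;
}

const a = microSecondsDemo();
const b = microSecondsDemo();
console.log(b > a); // true, even when both calls land in the same millisecond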

/**
62 changes: 27 additions & 35 deletions docs/index.js
@@ -238,18 +238,7 @@ function _startListening(channel) {

var listenerFn = function listenerFn(msgObj) {
channel._addEL[msgObj.type].forEach(function (listenerObject) {
/**
* Getting the current time in JavaScript has no good precision.
* So instead of only listening to events that happened 'after' the listener
* was added, we also listen to events that happened 100ms before it.
* This ensures that when another process, like a WebWorker, sends events
* we do not miss them out because their timestamp is a bit off compared to the main process.
* Not doing this would make messages missing when we send data directly after subscribing and awaiting a response.
* @link https://johnresig.com/blog/accuracy-of-javascript-time/
*/
var hundredMsInMicro = 100 * 1000;
var minMessageTime = listenerObject.time - hundredMsInMicro;
if (msgObj.time >= minMessageTime) {
if (msgObj.time >= listenerObject.time) {
listenerObject.fn(msgObj.data);
}
});
@@ -1392,14 +1381,15 @@ var microSeconds = exports.microSeconds = _util.microSeconds;
var type = exports.type = 'native';
function create(channelName) {
var state = {
time: (0, _util.microSeconds)(),
messagesCallback: null,
bc: new BroadcastChannel(channelName),
subFns: [] // subscriberFunctions
};

state.bc.onmessage = function (msg) {
state.bc.onmessage = function (msgEvent) {
if (state.messagesCallback) {
state.messagesCallback(msg.data);
state.messagesCallback(msgEvent.data);
}
};
return state;
@@ -1455,7 +1445,7 @@ var NativeMethod = exports.NativeMethod = {
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.SimulateMethod = void 0;
exports.SimulateMethod = exports.SIMULATE_DELAY_TIME = void 0;
exports.averageResponseTime = averageResponseTime;
exports.canBeUsed = canBeUsed;
exports.close = close;
@@ -1470,30 +1460,36 @@ var type = exports.type = 'simulate';
var SIMULATE_CHANNELS = new Set();
function create(channelName) {
var state = {
time: microSeconds(),
name: channelName,
messagesCallback: null
};
console.log('created channel ' + state.counter);
SIMULATE_CHANNELS.add(state);
return state;
}
function close(channelState) {
SIMULATE_CHANNELS["delete"](channelState);
}
var SIMULATE_DELAY_TIME = exports.SIMULATE_DELAY_TIME = 5;
function postMessage(channelState, messageJson) {
return new Promise(function (res) {
return setTimeout(function () {
var channelArray = Array.from(SIMULATE_CHANNELS);
channelArray.filter(function (channel) {
return channel.name === channelState.name;
}).filter(function (channel) {
return channel !== channelState;
}).filter(function (channel) {
return !!channel.messagesCallback;
}).forEach(function (channel) {
return channel.messagesCallback(messageJson);
channelArray.forEach(function (channel) {
if (channel.name === channelState.name &&
// has same name
channel !== channelState &&
// not own channel
!!channel.messagesCallback &&
// has subscribers
channel.time < messageJson.time // channel not created after postMessage() call
) {
channel.messagesCallback(messageJson);
}
});
res();
}, 5);
}, SIMULATE_DELAY_TIME);
});
}
function onMessage(channelState, fn) {
@@ -1503,7 +1499,7 @@ function canBeUsed() {
return true;
}
function averageResponseTime() {
return 5;
return SIMULATE_DELAY_TIME;
}
var SimulateMethod = exports.SimulateMethod = {
create: create,
@@ -1596,25 +1592,21 @@ function randomToken() {
return Math.random().toString(36).substring(2);
}
var lastMs = 0;
var additional = 0;

/**
* returns the current time in micro-seconds,
* Returns the current unix time in micro-seconds,
* WARNING: This is a pseudo-function
* Performance.now is not reliable in webworkers, so we just make sure to never return the same time.
* This is enough in browsers, and this function will not be used in nodejs.
* The main reason for this hack is to ensure that BroadcastChannel behaves equal to production when it is used in fast-running unit tests.
*/
function microSeconds() {
var ms = Date.now();
if (ms === lastMs) {
additional++;
return ms * 1000 + additional;
} else {
lastMs = ms;
additional = 0;
return ms * 1000;
var ret = Date.now() * 1000; // milliseconds to microseconds
if (ret <= lastMs) {
ret = lastMs + 1;
}
lastMs = ret;
return ret;
}

/**