diff --git a/.github/workflows/test-browser.yml b/.github/workflows/test-browser.yml index e5a2f4543..bf0f504f1 100644 --- a/.github/workflows/test-browser.yml +++ b/.github/workflows/test-browser.yml @@ -25,11 +25,69 @@ jobs: with: node-version: 20.x - run: npm ci + + # Set up Python (for pipx) + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + # Install pipx (for mitmproxy) + # See https://pipx.pypa.io/stable/installation/ + - name: Install pipx + run: | + python3 -m pip install --user pipx + sudo pipx --global ensurepath + + # https://docs.mitmproxy.org/stable/overview-installation/#installation-from-the-python-package-index-pypi + - name: Install mitmproxy + run: | + pipx install mitmproxy + # We use this library in our addon + pipx inject mitmproxy websockets + + - name: Generate mitmproxy SSL certs + run: npx --package interception-proxy generate-mitmproxy-certs + + - name: Start interception proxy server + run: npx --package interception-proxy start-service + - name: Install Playwright browsers and dependencies run: npx playwright install --with-deps - - env: + + # For certutil + - name: Install NSS tools + run: sudo apt install libnss3-tools + + # This is for Chromium (see https://chromium.googlesource.com/chromium/src/+/master/docs/linux/cert_management.md) + # Note this is the same command that we use for adding it to the Firefox profile (see playwrightHelpers.js) + - name: Install mitmproxy root CA in NSS shared DB + run: | + mkdir -p ~/.pki/nssdb + certutil -A -d sql:$HOME/.pki/nssdb -t "C" -n "Mitmproxy Root Cert" -i ~/.mitmproxy/mitmproxy-ca-cert.pem + certutil -L -d sql:$HOME/.pki/nssdb + + # This is for WebKit (I think because it uses OpenSSL) + - name: Install mitmproxy root CA in /usr/local/share/ca-certificates + run: | + sudo cp ~/.mitmproxy/mitmproxy-ca-cert.cer /usr/local/share/ca-certificates/mitmproxy-ca-cert.crt + sudo update-ca-certificates + + - name: Run the tests + env: PLAYWRIGHT_BROWSER: ${{ matrix.browser }} run: npm run test:playwright + + - name: Save interception proxy server logs + if: always() + run: sudo journalctl -u ably-sdk-test-proxy.service > interception-proxy-logs.txt + + - name: Upload interception proxy server logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: interception-proxy-logs-${{ matrix.browser }} + path: interception-proxy-logs.txt + - name: Generate private API usage reports run: npm run process-private-api-data private-api-usage/*.json - name: Save private API usage data diff --git a/.github/workflows/test-node.yml b/.github/workflows/test-node.yml index d74d0b3db..66765ca60 100644 --- a/.github/workflows/test-node.yml +++ b/.github/workflows/test-node.yml @@ -25,9 +25,120 @@ jobs: with: node-version: ${{ matrix.node-version }} - run: npm ci - - run: npm run test:node + + # Set up Python (for pipx) + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + # Install pipx (for mitmproxy) + # See https://pipx.pypa.io/stable/installation/ + - name: Install pipx + run: | + python3 -m pip install --user pipx + sudo pipx --global ensurepath + + # https://docs.mitmproxy.org/stable/overview-installation/#installation-from-the-python-package-index-pypi + - name: Install mitmproxy + run: | + pipx install mitmproxy + # We use this library in our addon + pipx inject mitmproxy websockets + + - name: Create a user to run the tests + run: sudo useradd --create-home ably-test-user + + - name: Create a group for sharing the working directory + run: | + sudo groupadd ably-test-users + # Add relevant 
users to the group + sudo usermod --append --groups ably-test-users $USER + sudo usermod --append --groups ably-test-users ably-test-user + # Give the group ownership of the working directory and everything under it... + sudo chown -R :ably-test-users . + # ...and give group members full read/write access to its contents (i.e. rw access to files, rwx access to directories) + # (We use xargs because `find` does not fail if an `exec` command fails; see https://serverfault.com/a/905039) + find . -type f -print0 | xargs -n1 -0 chmod g+rw + find . -type d -print0 | xargs -n1 -0 chmod g+rwx + # TODO understand better + # + # This is to make `npm run` work when run as ably-test-user; else it fails because of a `statx()` call on package.json: + # + # > 2024-04-17T13:08:09.1302251Z [pid 2051] statx(AT_FDCWD, `"/home/runner/work/ably-js/ably-js/package.json"`, AT_STATX_SYNC_AS_STAT, STATX_ALL, 0x7f4875ffcb40) = -1 EACCES (Permission denied) + # + # statx documentation says: + # + # > in the case of **statx**() with a pathname, execute (search) permission is required on all of the directories in _pathname_ that lead to the file. + # + # The fact that I’m having to do this probably means that I’m doing something inappropriate elsewhere. (And I don’t know what the other consequences of doing this might be.) + chmod o+x ~ + + # TODO set umask appropriately, so that new files created are readable/writable by the group + + - name: Generate mitmproxy SSL certs + run: npx --package interception-proxy generate-mitmproxy-certs + + - name: Set up iptables rules + run: | + # The rules suggested by mitmproxy etc are aimed at intercepting _all_ the outgoing traffic on a machine. I don’t want that, given that we want to be able to run this test suite on developers’ machines in a non-invasive manner. Instead we just want to target traffic generated by the process that contains the Ably SDK, which we’ll make identifable by iptables by running that process as a specific user created for that purpose (ably-test-user). + # + # Relevant parts of iptables documentation: + # + # nat: + # > This table is consulted when a packet that creates a new connection is encountered. It consists of three built-ins: PREROUTING (for altering packets as soon as they come in), OUTPUT (for altering locally-generated packets before routing), and POSTROUTING (for altering packets as they are about to go out). + # + # owner: + # > This module attempts to match various characteristics of the packet creator, for locally-generated packets. It is only valid in the OUTPUT chain, and even this some packets (such as ICMP ping responses) may have no owner, and hence never match. + # + # REDIRECT: + # > This target is only valid in the nat table, in the PREROUTING and OUTPUT chains, and user-defined chains which are only called from those chains. It redirects the packet to the machine itself by changing the destination IP to the primary address of the incoming interface (locally-generated packets are mapped to the 127.0.0.1 address). It takes one option: + # > + # > --to-ports port[-port] + # > This specifies a destination port or range of ports to use: without this, the destination port is never altered. This is only valid if the rule also specifies -p tcp or -p udp. + # + # I don’t exactly understand what the nat table means; I assume its rules apply to all _subsequent_ packets in the connection, too? + # + # So, what I expect to happen: + # + # 1. 
iptables rule causes default-port HTTP(S) datagram from test process to get its destination IP rewritten to 127.0.0.1, and rewrites the TCP header’s destination port to 8080 + # 2. 127.0.0.1 destination causes OS’s routing to send this datagram on the loopback interface + # 3. nature of the loopback interface means that this datagram is then received on the loopback interface + # 4. mitmproxy, listening on port 8080 (not sure how or why it uses a single port for both non-TLS and TLS traffic) receives these datagrams, and uses Host header or SNI to figure out where they were originally destined. + # + # TODO (how) do we achieve the below on macOS? I have a feeling that it’s currently just working by accident; e.g. it's because the TCP connection to the control server exists before we start mitmproxy and hence the connection doesn’t get passed to its NETransparentProxyProvider or something. To be on the safe side, though, I’ve added a check in the mitmproxy addon so that we only mess with stuff for ports 80 or 443 + # + # Note that in the current setup with ably-js, the test suite and the Ably SDK run in the same process. We want to make sure that we don’t intercept the test suite’s WebSocket communications with the interception proxy’s control API (which it serves at 127.0.0.1:8001), hence only targeting the default HTTP(S) ports. (TODO consider that Realtime team also run a Realtime on non-default ports when testing locally) + sudo iptables --table nat --append OUTPUT --match owner --uid-owner ably-test-user --protocol tcp --destination-port 80 --jump REDIRECT --to-ports 8080 + sudo iptables --table nat --append OUTPUT --match owner --uid-owner ably-test-user --protocol tcp --destination-port 443 --jump REDIRECT --to-ports 8080 + sudo ip6tables --table nat --append OUTPUT --match owner --uid-owner ably-test-user --protocol tcp --destination-port 80 --jump REDIRECT --to-ports 8080 + sudo ip6tables --table nat --append OUTPUT --match owner --uid-owner ably-test-user --protocol tcp --destination-port 443 --jump REDIRECT --to-ports 8080 + + # TODO how will this behave with: + # + # 1. the WebSocket connection from test suite to control API (see above note; not a problem in this CI setup, think about it on macOS) + # 2. the WebSocket connection from mitmproxy to control API (not an issue on Linux or macOS with our current setup since we don’t intercept any traffic from mitmproxy) + # 3. the WebSocket connections that mitmproxy proxies to the interception proxy (which it sends to localhost:8002) (ditto 2) + # 4. 
the WebSocket connections for which interception proxy is a client (not an issue for Linux or macOS with our current setup since we don’t intercept any traffic from interception proxy) + + - name: Start interception proxy server + run: npx --package interception-proxy start-service + + - name: Run the tests + run: sudo -u ably-test-user NODE_EXTRA_CA_CERTS=~/.mitmproxy/mitmproxy-ca-cert.pem npm run test:node env: CI: true + + - name: Save interception proxy server logs + if: always() + run: sudo journalctl -u ably-sdk-test-proxy.service > interception-proxy-logs.txt + + - name: Upload interception proxy server logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: interception-proxy-logs-${{ matrix.node-version }} + path: interception-proxy-logs.txt + - name: Generate private API usage reports run: npm run process-private-api-data private-api-usage/*.json - name: Save private API usage data diff --git a/.gitignore b/.gitignore index f98f1bc7b..7fdc9ad51 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ junit/ private-api-usage/ private-api-usage-reports/ test/support/mocha_junit_reporter/build/ +test/support/json-rpc-2.0/build/ diff --git a/Gruntfile.js b/Gruntfile.js index a173c6913..1c5bf078b 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -47,7 +47,12 @@ module.exports = function (grunt) { dirs: dirs, webpack: { all: Object.values(webpackConfig), - browser: [webpackConfig.browser, webpackConfig.browserMin, webpackConfig.mochaJUnitReporterBrowser], + browser: [ + webpackConfig.browser, + webpackConfig.browserMin, + webpackConfig.mochaJUnitReporterBrowser, + webpackConfig.jsonRPC, + ], }, }; diff --git a/package-lock.json b/package-lock.json index 5142b85ab..b218e78ee 100644 --- a/package-lock.json +++ b/package-lock.json @@ -53,8 +53,10 @@ "grunt-shell": "~1.1", "grunt-webpack": "^5.0.0", "hexy": "~0.2", + "interception-proxy": "github:ably-labs/interception-proxy", "jmespath": "^0.16.0", "jsdom": "^20.0.0", + "json-rpc-2.0": "^1.7.0", "minimist": "^1.2.5", "mocha": "^8.1.3", "mocha-junit-reporter": "^2.2.1", @@ -6073,6 +6075,21 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "dev": true }, + "node_modules/interception-proxy": { + "version": "0.1.0", + "resolved": "git+ssh://git@github.com/ably-labs/interception-proxy.git#ff9e99451ed3971a1dd929d2f5cb80d4896269b6", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "json-rpc-2.0": "^1.7.0", + "ws": "^8.18.0" + }, + "bin": { + "generate-mitmproxy-certs": "bin/generate-mitmproxy-certs", + "interception-proxy": "bin/interception-proxy", + "start-service": "bin/start-service" + } + }, "node_modules/internal-slot": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", @@ -6801,6 +6818,12 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, + "node_modules/json-rpc-2.0": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/json-rpc-2.0/-/json-rpc-2.0-1.7.0.tgz", + "integrity": "sha512-asnLgC1qD5ytP+fvBP8uL0rvj+l8P6iYICbzZ8dVxCpESffVjzA7KkYkbKCIbavs7cllwH1ZUaNtJwphdeRqpg==", + "dev": true + }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -11048,9 +11071,9 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/ws": { - 
"version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "engines": { "node": ">=10.0.0" }, @@ -15547,6 +15570,15 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "dev": true }, + "interception-proxy": { + "version": "git+ssh://git@github.com/ably-labs/interception-proxy.git#ff9e99451ed3971a1dd929d2f5cb80d4896269b6", + "dev": true, + "from": "interception-proxy@github:ably-labs/interception-proxy", + "requires": { + "json-rpc-2.0": "^1.7.0", + "ws": "^8.18.0" + } + }, "internal-slot": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", @@ -16058,6 +16090,12 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, + "json-rpc-2.0": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/json-rpc-2.0/-/json-rpc-2.0-1.7.0.tgz", + "integrity": "sha512-asnLgC1qD5ytP+fvBP8uL0rvj+l8P6iYICbzZ8dVxCpESffVjzA7KkYkbKCIbavs7cllwH1ZUaNtJwphdeRqpg==", + "dev": true + }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -19122,9 +19160,9 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "requires": {} }, "xml": { diff --git a/package.json b/package.json index d146d2d7a..9a3d6e681 100644 --- a/package.json +++ b/package.json @@ -97,8 +97,10 @@ "grunt-shell": "~1.1", "grunt-webpack": "^5.0.0", "hexy": "~0.2", + "interception-proxy": "github:ably-labs/interception-proxy", "jmespath": "^0.16.0", "jsdom": "^20.0.0", + "json-rpc-2.0": "^1.7.0", "minimist": "^1.2.5", "mocha": "^8.1.3", "mocha-junit-reporter": "^2.2.1", @@ -142,6 +144,7 @@ "test:node:skip-build": "mocha", "test:webserver": "grunt test:webserver", "test:playwright": "node test/support/runPlaywrightTests.js", + "test:playwright:open-browser": "node test/support/openPlaywrightBrowser.js", "test:react": "vitest run", "test:package": "grunt test:package", "concat": "grunt concat", diff --git a/test/common/globals/named_dependencies.js b/test/common/globals/named_dependencies.js index e06d92598..2711ca258 100644 --- a/test/common/globals/named_dependencies.js +++ b/test/common/globals/named_dependencies.js @@ -18,9 +18,16 @@ define(function () { async: { browser: 'node_modules/async/lib/async' }, chai: { browser: 'node_modules/chai/chai', node: 'node_modules/chai/chai' }, ulid: { browser: 'node_modules/ulid/dist/index.umd', node: 'node_modules/ulid/dist/index.umd' }, + interception_proxy_client: { + browser: 'test/common/modules/interception_proxy_client', + node: 'test/common/modules/interception_proxy_client', + }, private_api_recorder: { browser: 
'test/common/modules/private_api_recorder', node: 'test/common/modules/private_api_recorder', }, + 'json-rpc-2.0': { + browser: 'test/support/json-rpc-2.0/build/browser', + }, }); }); diff --git a/test/common/modules/interception_proxy_client.js b/test/common/modules/interception_proxy_client.js new file mode 100644 index 000000000..51ba8b789 --- /dev/null +++ b/test/common/modules/interception_proxy_client.js @@ -0,0 +1,226 @@ +'use strict'; + +define(['ably', 'json-rpc-2.0'], function (Ably, { JSONRPCClient, JSONRPCServer, JSONRPCServerAndClient }) { + // copied from crypto test + var msgpack = typeof window == 'object' ? Ably.msgpack : require('@ably/msgpack-js'); + // similar approach + var WebSocket = typeof window == 'object' ? window.WebSocket : require('ws'); + var BufferUtils = Ably.Realtime.Platform.BufferUtils; + + function log(...args) { + console.log('Interception proxy client:', ...args); + } + + function serialize(type, deserialized) { + let data; + + if (type === 'binary') { + const serialized = msgpack.encode(deserialized); + data = BufferUtils.base64Encode(serialized); + } else if (type === 'text') { + data = JSON.stringify(deserialized); + } + + return { type, data }; + } + + class InterceptionProxyClient { + currentContext = null; + + constructor() { + this.jsonRPC = new JSONRPCServerAndClient( + new JSONRPCServer(), + new JSONRPCClient((request) => { + const data = JSON.stringify(request); + log('sending data to control server:', data); + this.webSocket.send(data); + }), + ); + + this.jsonRPC.addMethod('transformInterceptedMessage', (params) => this.transformInterceptedMessage(params)); + } + + // this expects the interception proxy to already be running (i.e. the test suite doesn't launch it) + // this method is called by test suite’s root hooks. test cases shouldn't call this method; rather, they should use `intercept` + async connect() { + this.webSocket = new WebSocket('ws://localhost:8001'); + + await new Promise((resolve, reject) => { + this.webSocket.addEventListener('open', () => { + log('connected to interception proxy'); + resolve(); + }); + this.webSocket.addEventListener('error', (error) => { + log('failed to connect to interception proxy:', error); + reject(error); + }); + this.webSocket.addEventListener('message', ({ data }) => { + log('got control API message', data); + this.jsonRPC.receiveAndSend(JSON.parse(data)); + }); + }); + + await this.startInterception(); + log('startInterception completed'); + + // TODO something if connection lost + } + + async startInterception() { + // i.e. for browser we use proxy, for Node we use local + const params = typeof window == 'object' ? { mode: 'proxy' } : { mode: 'local', pid: process.pid }; + + log('sending startInterception request with params', params); + await this.jsonRPC.request('startInterception', params); + } + + async disconnect() { + if (this.webSocket.readyState === 3) { + // already closed + log('already disconnected'); + return; + } + + this.webSocket.close(); + + return new Promise((resolve) => { + this.webSocket.addEventListener('close', () => { + log('disconnected'); + resolve(); + }); + }); + } + + // TODO explain motivation for this API (so that a lingering test can’t accidentally override the interception in your test; of course, the interception in your test might accidentally _intercept_ messages sent by a lingering test but that’s a separate issue). 
More broadly it’s a way of ensuring a test case’s effects don’t outlive its execution; perhaps we could do this using hooks instead + // + // This is written as (done, action) for compatibility with the way our tests are currently written; a promise-based version would be good to have too + // + // action receives a context object. it can modify the following properties of this object to modify the interception: + // + // - `transformClientMessage` or `transformServerMessage` (for message from client or server respectively) + // + // Receives an object with the following properties: + // + // - id: a unique identifier for this WebSocket message (generated by the interception proxy) + // - connectionID: a unique identifier for this WebSocket connection (generated by the interception proxy) + // - deserialized: a JSON-like object (i.e. the result of JSON.parse or msgpack.decode) + // + // And returns one of: + // + // - a JSON-like object (to modify the message) + // - `null` (to drop the message) + // + // Can also return a promise. + // + // If not set, then messages will be passed through unaltered. + // + // TODO some thoughts on API: + // + // - user currently has to make sure they remember to return something from the transform* function, even if they’re failing their test in some exceptional manner — this is to make sure that the connection can be allowed to disconnect at the proxy + // + // - users might think that they can directly mutate the object passed to transform* functions + intercept(done, action) { + if (this.currentContext !== null) { + throw new Error( + 'A call to `intercept` is already active; check you’re not running multiple tests at the same time', + ); + } + + this.currentContext = new InterceptionContext(this.jsonRPC); + + const newDone = (error) => { + this.currentContext = null; + done(error); + }; + + action(newDone, this.currentContext); + } + + async transformInterceptedMessage(paramsDTO) { + this.currentContext?._recordSeenConnection(paramsDTO); + + let deserialized; + if (paramsDTO.type === 'binary') { + const data = BufferUtils.base64Decode(paramsDTO.data); + deserialized = msgpack.decode(data); + } else if (paramsDTO.type === 'text') { + const data = paramsDTO.data; + deserialized = JSON.parse(data); + } + + log('awaiting response of transformInterceptedMessage for message', paramsDTO, 'deserialized to', deserialized); + + const message = { id: paramsDTO.id, connectionID: paramsDTO.connectionID, deserialized }; + + const noOpTransformInterceptedMessage = (message) => { + log(`default transformInterceptedMessage implementation passing message ${message.id} unaltered`); + return message.deserialized; + }; + + const contextTransformInterceptedMessage = + (paramsDTO.fromClient + ? this.currentContext?.transformClientMessage + : this.currentContext?.transformServerMessage) ?? 
noOpTransformInterceptedMessage; + + const result = await contextTransformInterceptedMessage(message); + log(`got result of transforming message ${message.id}`, result); + + if (result === null) { + return { action: 'drop' }; + } else { + return { action: 'replace', ...serialize(paramsDTO.type, result) }; + } + } + } + + class InterceptionContext { + transformClientMessage = null; + transformServerMessage = null; + // TODO this is a temporary API until I figure out what the right thing to do is (probably to add an interception proxy notification when a new connection is intercepted, and then infer it from the query param), but document it anyway + // elements are { type: 'binary' | 'text' } + // + // keyed by connection ID, ordered oldest-to-newest connection + #seenConnections = new Map(); + + constructor(jsonRPC) { + this.jsonRPC = jsonRPC; + } + + _recordSeenConnection(transformInterceptedMessageParamsDTO) { + const { connectionID, type } = transformInterceptedMessageParamsDTO; + + if (this.#seenConnections.has(connectionID)) { + return; + } + + this.#seenConnections.set(connectionID, { type }); + } + + // TODO the term "connection ID" is a bit overloaded (becuse it’s an Ably concept too) + get latestConnectionID() { + if (this.#seenConnections.size === 0) { + return null; + } + + return Array.from(this.#seenConnections.keys()).pop(); + } + + async injectMessage(connectionID, deserialized, fromClient) { + const seenConnection = this.#seenConnections.get(connectionID); + if (!seenConnection) { + throw new Error(`Cannot inject message — have not seen a connection with ID ${connectionID}`); + } + + const params = { + connectionID, + fromClient, + ...serialize(seenConnection.type, deserialized), + }; + + log('sending injectMessage request with params', params); + await this.jsonRPC.request('injectMessage', params); + } + } + + return (module.exports = new InterceptionProxyClient()); +}); diff --git a/test/realtime/auth.test.js b/test/realtime/auth.test.js index 3cdd43dcb..81899d0ca 100644 --- a/test/realtime/auth.test.js +++ b/test/realtime/auth.test.js @@ -1,6 +1,12 @@ 'use strict'; -define(['ably', 'shared_helper', 'async', 'chai'], function (Ably, Helper, async, chai) { +define(['ably', 'shared_helper', 'async', 'chai', 'interception_proxy_client'], function ( + Ably, + Helper, + async, + chai, + interceptionProxyClient, +) { var currentTime; var exampleTokenDetails; var exports = {}; @@ -1167,42 +1173,51 @@ define(['ably', 'shared_helper', 'async', 'chai'], function (Ably, Helper, async * @spec RTN22 */ it('mocked_reauth', function (done) { - var helper = this.test.helper, - rest = helper.AblyRest(), - authCallback = function (tokenParams, callback) { - // Request a token (should happen twice) - Helper.whenPromiseSettles(rest.auth.requestToken(tokenParams, null), function (err, tokenDetails) { - if (err) { - helper.closeAndFinish(done, realtime, err); - return; - } - callback(null, tokenDetails); - }); - }, - realtime = helper.AblyRealtime({ authCallback: authCallback, transports: [helper.bestTransport] }); + interceptionProxyClient.intercept(done, (done, interceptionContext) => { + var helper = this.test.helper, + rest = helper.AblyRest(), + authCallback = function (tokenParams, callback) { + // Request a token (should happen twice) + Helper.whenPromiseSettles(rest.auth.requestToken(tokenParams, null), function (err, tokenDetails) { + if (err) { + helper.closeAndFinish(done, realtime, err); + return; + } + callback(null, tokenDetails); + }); + }, + realtime = helper.AblyRealtime({ 
authCallback: authCallback, transports: [helper.bestTransport] }); - realtime.connection.once('connected', function () { - helper.recordPrivateApi('read.connectionManager.activeProtocol.transport'); - var transport = realtime.connection.connectionManager.activeProtocol.transport, - originalSend = transport.send; - helper.recordPrivateApi('replace.transport.send'); - /* Spy on transport.send to detect the outgoing AUTH */ - transport.send = function (message) { - if (message.action === 17) { - try { - expect(message.auth.accessToken, 'Check AUTH message structure is as expected').to.be.ok; - helper.closeAndFinish(done, realtime); - } catch (err) { - helper.closeAndFinish(done, realtime, err); + realtime.connection.once('connected', function () { + /* Spy on client messages to detect the outgoing AUTH */ + interceptionContext.transformClientMessage = ({ deserialized: message }) => { + if (message.action === 17) { + // TODO return value? the original code didn’t call originalSend. We should either: + // - make sure that we always return something (i.e. force it on to callers) + // - make sure that if nothing is returned then the interception proxy client makes this very obvious + // - make sure to clean up outstanding messages when the `intercept`-created `done` is called + // + // I think this is what’s causing this in the logs: + // Interception proxy client: got result of transforming message d814955d-8a15-4c2f-b873-1bd0c3448635 undefined + // and what's hence causing Realtime to send + // message: 'Invalid websocket message (decode failure). (See https://help.ably.io/error/40000 for help.)', + // + // TODO + // should we have a separate "spy" interception proxy API that doesn’t require a return value? + try { + expect(message.auth.accessToken, 'Check AUTH message structure is as expected').to.be.ok; + helper.closeAndFinish(done, realtime); + } catch (err) { + helper.closeAndFinish(done, realtime, err); + } + return null; + } else { + return message; } - } else { - helper.recordPrivateApi('call.transport.send'); - originalSend.call(this, message); - } - }; - /* Inject a fake AUTH from realtime */ - helper.recordPrivateApi('call.transport.onProtocolMessage'); - transport.onProtocolMessage({ action: 17 }); + }; + /* Inject a fake AUTH from realtime */ + interceptionContext.injectMessage(interceptionContext.latestConnectionID, { action: 17 }, false); + }); }); }); diff --git a/test/realtime/connection.test.js b/test/realtime/connection.test.js index 5a6b7952c..dce9d6c7d 100644 --- a/test/realtime/connection.test.js +++ b/test/realtime/connection.test.js @@ -1,6 +1,12 @@ 'use strict'; -define(['ably', 'shared_helper', 'async', 'chai'], function (Ably, Helper, async, chai) { +define(['ably', 'shared_helper', 'async', 'chai', 'interception_proxy_client'], function ( + Ably, + Helper, + async, + chai, + interceptionProxyClient, +) { var expect = chai.expect; var createPM = Ably.protocolMessageFromDeserialized; @@ -185,119 +191,123 @@ define(['ably', 'shared_helper', 'async', 'chai'], function (Ably, Helper, async * @spec RTN19a2 */ it('connectionQueuing', function (done) { - var helper = this.test.helper, - realtime = helper.AblyRealtime({ transports: [helper.bestTransport] }), - channel = realtime.channels.get('connectionQueuing'), - connectionManager = realtime.connection.connectionManager; - - realtime.connection.once('connected', function () { - helper.recordPrivateApi('read.connectionManager.activeProtocol.transport'); - var transport = connectionManager.activeProtocol.transport; - 
Helper.whenPromiseSettles(channel.attach(), function (err) { - if (err) { - helper.closeAndFinish(done, realtime, err); - return; - } + interceptionProxyClient.intercept(done, (done, interceptionContext) => { + var helper = this.test.helper, + realtime = helper.AblyRealtime({ transports: [helper.bestTransport] }), + channel = realtime.channels.get('connectionQueuing'), + connectionManager = realtime.connection.connectionManager; - let transportSendCallback; + realtime.connection.once('connected', function () { + Helper.whenPromiseSettles(channel.attach(), function (err) { + if (err) { + helper.closeAndFinish(done, realtime, err); + return; + } - helper.recordPrivateApi('replace.transport.send'); - /* Sabotage sending the message */ - transport.send = function (msg) { - if (msg.action == 15) { - expect(msg.msgSerial).to.equal(0, 'Expect msgSerial to be 0'); + let transportSendCallback; - if (!transportSendCallback) { - done(new Error('transport.send override called before transportSendCallback populated')); - } + /* Sabotage sending the message */ + interceptionContext.transformClientMessage = (msg) => { + if (msg.deserialized.action == 15) { + expect(msg.deserialized.msgSerial).to.equal(0, 'Expect msgSerial to be 0'); - transportSendCallback(null); - } - }; + if (!transportSendCallback) { + done(new Error('transport.send override called before transportSendCallback populated')); + } - let publishCallback; + transportSendCallback(null); + } + }; - async.series( - [ - function (cb) { - transportSendCallback = cb; + let publishCallback; - /* Sabotaged publish */ - Helper.whenPromiseSettles(channel.publish('first', null), function (err) { - if (!publishCallback) { - done(new Error('publish completed before publishCallback populated')); - } - publishCallback(err); - }); - }, + async.series( + [ + function (cb) { + transportSendCallback = cb; - // We wait for transport.send to recieve the message that we just - // published before we proceed to disconnecting the transport, to - // make sure that the message got marked as `sendAttempted`. + /* Sabotaged publish */ + Helper.whenPromiseSettles(channel.publish('first', null), function (err) { + if (!publishCallback) { + done(new Error('publish completed before publishCallback populated')); + } + publishCallback(err); + }); + }, - function (cb) { - async.parallel( - [ - function (cb) { - publishCallback = function (err) { - try { - expect(!err, 'Check publish happened (eventually) without err').to.be.ok; - } catch (err) { - cb(err); - return; - } - cb(); - }; - }, - function (cb) { - /* After the disconnect, on reconnect, spy on transport.send again */ - helper.recordPrivateApi('listen.connectionManager.transport.pending'); - connectionManager.once('transport.pending', function (transport) { - var oldSend = transport.send; + // We wait for transport.send to recieve the message that we just + // published before we proceed to disconnecting the transport, to + // make sure that the message got marked as `sendAttempted`. 
- helper.recordPrivateApi('replace.transport.send'); - transport.send = function (msg, msgCb) { - if (msg.action === 15) { - if (msg.messages[0].name === 'first') { - try { - expect(msg.msgSerial).to.equal(0, 'Expect msgSerial of original message to still be 0'); - expect(msg.messages.length).to.equal( - 1, - 'Expect second message to not have been merged with the attempted message', - ); - } catch (err) { - cb(err); - return; - } - } else if (msg.messages[0].name === 'second') { - try { - expect(msg.msgSerial).to.equal(1, 'Expect msgSerial of new message to be 1'); - } catch (err) { - cb(err); - return; - } - cb(); - } + function (cb) { + async.parallel( + [ + function (cb) { + publishCallback = function (err) { + try { + expect(!err, 'Check publish happened (eventually) without err').to.be.ok; + } catch (err) { + cb(err); + return; } - helper.recordPrivateApi('call.transport.send'); - oldSend.call(transport, msg, msgCb); + cb(); }; - channel.publish('second', null); - }); + }, + function (cb) { + /* After the disconnect, on reconnect, spy on transport.send again */ + helper.recordPrivateApi('listen.connectionManager.transport.pending'); + connectionManager.once('transport.pending', function (transport) { + // TODO does the identity of this transport matter, and can we replace the `transport.pending` check with something external too (e.g. detecting a new connection)? perhaps let's have an EventEmitter interface on the interception context that says when there's a new connection or something + interceptionContext.transformClientMessage = function (msg) { + if (msg.deserialized.action === 15) { + if (msg.deserialized.messages[0].name === 'first') { + try { + expect(msg.deserialized.msgSerial).to.equal( + 0, + 'Expect msgSerial of original message to still be 0', + ); + expect(msg.deserialized.messages.length).to.equal( + 1, + 'Expect second message to not have been merged with the attempted message', + ); + } catch (err) { + cb(err); + return msg.deserialized; + } + } else if (msg.deserialized.messages[0].name === 'second') { + try { + expect(msg.deserialized.msgSerial).to.equal( + 1, + 'Expect msgSerial of new message to be 1', + ); + } catch (err) { + cb(err); + return msg.deserialized; + } + cb(); + } + } + + // preserve the message + return msg.deserialized; + }; + channel.publish('second', null); + }); - /* Disconnect the transport (will automatically reconnect and resume) () */ - helper.recordPrivateApi('call.connectionManager.disconnectAllTransports'); - connectionManager.disconnectAllTransports(); - }, - ], - cb, - ); + /* Disconnect the transport (will automatically reconnect and resume) () */ + helper.recordPrivateApi('call.connectionManager.disconnectAllTransports'); + connectionManager.disconnectAllTransports(); + }, + ], + cb, + ); + }, + ], + function (err) { + helper.closeAndFinish(done, realtime, err); }, - ], - function (err) { - helper.closeAndFinish(done, realtime, err); - }, - ); + ); + }); }); }); }); diff --git a/test/support/browser_file_list.js b/test/support/browser_file_list.js index 80d5d8d8b..e90d2a3f8 100644 --- a/test/support/browser_file_list.js +++ b/test/support/browser_file_list.js @@ -8,6 +8,7 @@ window.__testFiles__.files = { 'browser/lib/util/base64.js': true, 'node_modules/async/lib/async.js': true, 'node_modules/@ably/vcdiff-decoder/dist/vcdiff-decoder.js': true, + 'test/support/json-rpc-2.0/build/index.js': true, 'test/common/globals/environment.js': true, 'test/common/globals/named_dependencies.js': true, 'test/common/modules/client_module.js': 
true, diff --git a/test/support/json-rpc-2.0/index.js b/test/support/json-rpc-2.0/index.js new file mode 100644 index 000000000..b169ee62f --- /dev/null +++ b/test/support/json-rpc-2.0/index.js @@ -0,0 +1,15 @@ +/* +For some reason, the object that I get when I write +`require('json-rpc-2.0')` has all of the correct keys (JSONRPCClient etc) +but the values are all undefined. No idea why and don’t really want to spend +time debugging it. So, here I’ve copied the require statements from the +library’s entrypoint (../../../node_modules/json-rpc-2.0/dist/index.js) and +it’s working 🤷 +*/ +module.exports = { + ...require('../../../node_modules/json-rpc-2.0/dist/client'), + ...require('../../../node_modules/json-rpc-2.0/dist/interfaces'), + ...require('../../../node_modules/json-rpc-2.0/dist/models'), + ...require('../../../node_modules/json-rpc-2.0/dist/server'), + ...require('../../../node_modules/json-rpc-2.0/dist/server-and-client'), +}; diff --git a/test/support/openPlaywrightBrowser.js b/test/support/openPlaywrightBrowser.js new file mode 100644 index 000000000..2f828bd29 --- /dev/null +++ b/test/support/openPlaywrightBrowser.js @@ -0,0 +1,5 @@ +const { openPlaywrightBrowser } = require('./playwrightHelpers'); + +(async function run() { + await openPlaywrightBrowser(false /* headless */); +})(); diff --git a/test/support/playwrightHelpers.js b/test/support/playwrightHelpers.js new file mode 100644 index 000000000..ac3740f76 --- /dev/null +++ b/test/support/playwrightHelpers.js @@ -0,0 +1,67 @@ +const path = require('path'); +const util = require('util'); +const exec = util.promisify(require('child_process').exec); +const playwright = require('playwright'); +const { randomUUID } = require('crypto'); +const playwrightBrowsers = ['chromium', 'firefox', 'webkit']; + +async function openPlaywrightBrowser(headless) { + const browserEnv = process.env.PLAYWRIGHT_BROWSER; + + if (!playwrightBrowsers.includes(browserEnv)) { + throw new Error( + `PLAYWRIGHT_BROWSER environment variable must be one of: ${playwrightBrowsers.join( + ', ', + )}. 
Currently: ${browserEnv}`, + ); + } + + const browserType = playwright[browserEnv]; + + const options = { + headless, + // bypass localhost so that the proxy doesn’t need to be running in order for us to contact the control API to tell it to be started; TODO there is quite possibly a less convoluted way of starting the proxy in this case, also think in a more holistic manner about the various ways in which we make sure that only certain traffic is intercepted (there are notes dotted around about this) + proxy: { server: 'localhost:8080', bypass: 'localhost' }, + }; + + // (I originally tried using the ignoreHTTPSErrors Playwright option, but that doesn’t seem to work for CORS preflight requests) + + let context; + let closeBrowser; + + if (browserEnv === 'firefox') { + // TODO clean up when closing + const profileDirectory = path.join('tmp', 'browser-profiles', `browserEnv-${randomUUID()}`); + + // We create and then discard a browser instance just to create the structure of the profile directory, which I guess certutil needs + // TODO this probably isn’t necessary; I think we can just create the directory ahead of time and then use certutil to create the DB, like we do for Chromium + const throwawayBrowser = await browserType.launchPersistentContext(profileDirectory, { + ...options, + headless: true, + }); + await throwawayBrowser.close(); + + // Install the mitmproxy root CA cert + // https://github.com/microsoft/playwright/issues/18115#issuecomment-2067175748 + // https://wiki.mozilla.org/CA/AddRootToFirefox + // https://sadique.io/blog/2012/06/05/managing-security-certificates-from-the-console-on-windows-mac-os-x-and-linux/#firefox + // TODO document that on macOS you get certutil from `brew install nss` + await exec( + `certutil -A -d ${profileDirectory} -t C -n "Mitmproxy Root Cert" -i ~/.mitmproxy/mitmproxy-ca-cert.pem`, + ); + + context = await browserType.launchPersistentContext(profileDirectory, options); + closeBrowser = () => context.close(); + } else { + // TODO explain what to do for trust (I think that for WebKit and Chromium you need to install in the system store) + browser = await browserType.launch(options); + context = await browser.newContext(options); + closeBrowser = () => browser.close(); + } + + const page = await context.newPage(); + + return { browserType, closeBrowser, page }; +} + +module.exports = { openPlaywrightBrowser }; diff --git a/test/support/root_hooks.js b/test/support/root_hooks.js index 0f1232c39..a1009eeda 100644 --- a/test/support/root_hooks.js +++ b/test/support/root_hooks.js @@ -1,4 +1,8 @@ -define(['shared_helper'], function (Helper) { +define(['shared_helper', 'interception_proxy_client'], function (Helper, interceptionProxyClient) { + before(async function () { + await interceptionProxyClient.connect(); + }); + after(function (done) { const helper = Helper.forHook(this); this.timeout(10 * 1000); @@ -12,6 +16,12 @@ define(['shared_helper'], function (Helper) { helper.dumpPrivateApiUsage(); }); + after(async () => { + await interceptionProxyClient.disconnect(); + }); + + // The `START TEST` and `END TEST` logs are to make it easy to see the IDs of interception proxy connections that were started during the test, to correlate with the proxy logs + afterEach(function () { this.currentTest.helper.closeActiveClients(); }); @@ -21,6 +31,13 @@ define(['shared_helper'], function (Helper) { afterEach(function () { this.currentTest.helper.flushTestLogs(); }); + afterEach(function () { + console.log(`END TEST: ${this.currentTest.fullTitle()}`); + }); + + 
beforeEach(function () { + console.log(`START TEST: ${this.currentTest.fullTitle()}`); + }); beforeEach(function () { this.currentTest.helper = Helper.forTest(this); this.currentTest.helper.recordTestStart(); diff --git a/test/support/runPlaywrightTests.js b/test/support/runPlaywrightTests.js index edbcc1531..d7701746f 100644 --- a/test/support/runPlaywrightTests.js +++ b/test/support/runPlaywrightTests.js @@ -1,19 +1,17 @@ -const playwright = require('playwright'); const path = require('path'); const MochaServer = require('../web_server'); const fs = require('fs'); +const { openPlaywrightBrowser } = require('./playwrightHelpers'); const outputDirectoryPaths = require('./output_directory_paths'); const port = process.env.PORT || 3000; const host = 'localhost'; -const playwrightBrowsers = ['chromium', 'firefox', 'webkit']; const mochaServer = new MochaServer(/* playwrightTest: */ true); -const runTests = async (browserType) => { +const runTests = async () => { + const { browserType, closeBrowser, page } = await openPlaywrightBrowser(true /* headless */); + await mochaServer.listen(); - const browser = await browserType.launch(); - const context = await browser.newContext(); - const page = await context.newPage(); await page.goto(`http://${host}:${port}`); console.log(`\nrunning tests in ${browserType.name()}`); @@ -59,7 +57,7 @@ const runTests = async (browserType) => { } if (detail.pass) { - browser.close(); + closeBrowser(); resolve(); } else { reject(new Error(`${browserType.name()} tests failed, exiting with code 1`)); @@ -83,17 +81,7 @@ const runTests = async (browserType) => { let caughtError; try { - const browserEnv = process.env.PLAYWRIGHT_BROWSER; - - if (!playwrightBrowsers.includes(browserEnv)) { - throw new Error( - `PLAYWRIGHT_BROWSER environment variable must be one of: ${playwrightBrowsers.join( - ', ', - )}. Currently: ${browserEnv}`, - ); - } - - await runTests(playwright[browserEnv]); + await runTests(); } catch (error) { // save error for now, we must ensure we end mocha web server first. // if we end current process too early, mocha web server will be left running, diff --git a/webpack.config.js b/webpack.config.js index b00088412..e56603fd2 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -138,8 +138,28 @@ function createMochaJUnitReporterConfig() { }; } +/** + * Create an AMD version of the json-rpc-2.0 library so that we can use RequireJS to load it in the browser. + */ +function createJSONRPCConfig() { + const dir = path.join(__dirname, 'test', 'support', 'json-rpc-2.0'); + + return { + mode: 'development', + entry: path.join(dir, 'index.js'), + output: { + path: path.join(dir, 'build'), + filename: 'browser.js', + library: { + type: 'amd', + }, + }, + }; +} + module.exports = { nativeScript: nativeScriptConfig, reactNative: reactNativeConfig, mochaJUnitReporterBrowser: createMochaJUnitReporterConfig(), + jsonRPC: createJSONRPCConfig(), };
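
A minimal sketch of how a test case is expected to drive the new interception_proxy_client API, adapted from the mocked_reauth changes above. The test name and the specific messages dropped/injected here are illustrative only; the intercept / transformClientMessage / injectMessage / latestConnectionID surface and the action code 17 (AUTH) are taken from the files added in this diff.

    'use strict';

    define(['shared_helper', 'chai', 'interception_proxy_client'], function (Helper, chai, interceptionProxyClient) {
      var expect = chai.expect;

      // Hypothetical test name; mirrors the structure used in test/realtime/auth.test.js above.
      it('example_intercepted_auth', function (done) {
        interceptionProxyClient.intercept(done, (done, interceptionContext) => {
          var helper = this.test.helper,
            realtime = helper.AblyRealtime();

          realtime.connection.once('connected', function () {
            // Spy on outgoing protocol messages. Always return something:
            // the deserialized message to pass it through (possibly modified),
            // or null to drop it, so the proxy knows what to do with the frame.
            interceptionContext.transformClientMessage = ({ deserialized: message }) => {
              if (message.action === 17) {
                // Outgoing AUTH observed; assert on it, finish the test, and drop the message.
                try {
                  expect(message.auth.accessToken, 'Check AUTH message structure is as expected').to.be.ok;
                  helper.closeAndFinish(done, realtime);
                } catch (err) {
                  helper.closeAndFinish(done, realtime, err);
                }
                return null;
              }
              return message;
            };

            // Inject a fake server-to-client AUTH (fromClient = false) on the
            // most recently seen intercepted connection.
            interceptionContext.injectMessage(interceptionContext.latestConnectionID, { action: 17 }, false);
          });
        });
      });
    });

The `done` passed to the action is the wrapper created by `intercept`, so the interception context is cleared as soon as the test finishes; as noted in the TODOs above, the transform callbacks must return a value on every path, otherwise the proxy receives `undefined` and the connection cannot be released cleanly.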