diff --git a/config.env b/config.env
index cf4dec5c..a6ba7aae 100644
--- a/config.env
+++ b/config.env
@@ -1,5 +1,6 @@
BN_WS_OPERATIONS_TIME_WINDOW=900
BN_WS_OPERATIONS_LIMIT=60
+BN_WS_APPS_OPERATIONS_LIMIT=500
# Update this port in web/brightid-nginx.conf and docker-compose.yml too
BN_WS_PROFILE_SERVICE_PORT=3000
BN_ARANGO_PROTOCOL=http
@@ -26,6 +27,7 @@ BN_UPDATER_IDCHAIN_WSS=wss://idchain.one/ws/
BN_UPDATER_SEED_GROUPS_WS_URL=wss://idchain.one/ws/
BN_ARANGO_EXTRA_OPTS=
BN_DEVELOPMENT=false
+BN_PEERS=
# passwords
BN_SEED=
BN_WS_PRIVATE_KEY=
diff --git a/config.env.test b/config.env.test
index c1f8e1b5..628e54c1 100644
--- a/config.env.test
+++ b/config.env.test
@@ -1,5 +1,6 @@
BN_WS_OPERATIONS_TIME_WINDOW=900
BN_WS_OPERATIONS_LIMIT=60
+BN_WS_APPS_OPERATIONS_LIMIT=500
# Update this port in web/brightid-nginx.conf and docker-compose.yml too
BN_WS_PROFILE_SERVICE_PORT=3000
BN_ARANGO_PROTOCOL=http
@@ -26,6 +27,7 @@ BN_UPDATER_IDCHAIN_WSS=wss://idchain.one/ws/
BN_UPDATER_SEED_GROUPS_WS_URL=wss://idchain.one/ws/
BN_ARANGO_EXTRA_OPTS=
BN_DEVELOPMENT=true
+BN_PEERS=
# passwords
BN_SEED=
BN_WS_PRIVATE_KEY=
diff --git a/consensus/receiver.py b/consensus/receiver.py
index d8ca606a..0fd6b03d 100755
--- a/consensus/receiver.py
+++ b/consensus/receiver.py
@@ -6,6 +6,7 @@
import hashlib
import shutil
import requests
+import traceback
from arango import ArangoClient, errno
from web3 import Web3
from web3.middleware import geth_poa_middleware
@@ -22,14 +23,14 @@
def hash(op):
blockTime = op['blockTime']
op = {k: op[k] for k in op if k not in (
- 'sig', 'sig1', 'sig2', 'hash', 'blockTime')}
+ 'sig', 'sig1', 'sig2', 'sig3', 'sig4', 'sig5', 'hash', 'blockTime')}
if op['name'] == 'Set Signing Key':
del op['id1']
del op['id2']
# in next release checking blockTime should be removed
if op['name'] == 'Social Recovery' and op['v'] == 6 and blockTime > 1637380189000:
- del op['id1']
- del op['id2']
+ for k in ['id1', 'id2', 'id3', 'id4', 'id5']:
+ op.pop(k, None)
message = json.dumps(op, sort_keys=True, separators=(',', ':'))
m = hashlib.sha256()
m.update(message.encode('ascii'))
@@ -180,4 +181,5 @@ def wait():
main()
except Exception as e:
print(f'Error: {e}')
+ print(f'Traceback: {traceback.format_exc()}')
time.sleep(10)
diff --git a/docker-compose.yml b/docker-compose.yml
index 70d4953c..e3c98704 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -82,6 +82,7 @@ services:
image: nginx
volumes:
- ./web/brightid-nginx.conf:/etc/nginx/nginx.conf
+ - ./web/index.html:/var/www/html/index.html
network_mode: host
env_file:
- config.env
diff --git a/web/Dockerfile b/web/Dockerfile
deleted file mode 100644
index 8b590ee4..00000000
--- a/web/Dockerfile
+++ /dev/null
@@ -1,2 +0,0 @@
-FROM nginx:latest
-COPY brightid-nginx-dappnode.conf /etc/nginx/nginx.conf
diff --git a/web/brightid-nginx.conf b/web/brightid-nginx.conf
index 87efde80..c318e813 100644
--- a/web/brightid-nginx.conf
+++ b/web/brightid-nginx.conf
@@ -36,6 +36,8 @@ http {
server {
listen 80;
+ root /var/www/html/;
+
location /profile/ {
client_max_body_size 100k;
proxy_pass http://127.0.0.1:3000/;
diff --git a/web/index.html b/web/index.html
new file mode 100644
index 00000000..be7afd5c
--- /dev/null
+++ b/web/index.html
@@ -0,0 +1,52 @@
[new file body: a 52-line static HTML landing page; only the page title and heading "BrightID Node" are recoverable from this extract]
diff --git a/web_services/docker-entrypoint.sh b/web_services/docker-entrypoint.sh
index 05a4a2eb..2d308d94 100755
--- a/web_services/docker-entrypoint.sh
+++ b/web_services/docker-entrypoint.sh
@@ -12,6 +12,7 @@ foxx config /brightid5 privateKey=$BN_WS_PRIVATE_KEY
foxx config /brightid5 ethPrivateKey=$BN_WS_ETH_PRIVATE_KEY
foxx config /brightid5 operationsTimeWindow=$BN_WS_OPERATIONS_TIME_WINDOW
foxx config /brightid5 operationsLimit=$BN_WS_OPERATIONS_LIMIT
+foxx config /brightid5 appsOperationsLimit=$BN_WS_APPS_OPERATIONS_LIMIT
foxx upgrade /apply5 /code/foxx/apply5.zip ||
foxx install /apply5 /code/foxx/apply5.zip
@@ -25,7 +26,9 @@ foxx config /brightid6 ethPrivateKey=$BN_WS_ETH_PRIVATE_KEY
foxx config /brightid6 consensusSenderPrivateKey=$BN_CONSENSUS_PRIVATE_KEY
foxx config /brightid6 operationsTimeWindow=$BN_WS_OPERATIONS_TIME_WINDOW
foxx config /brightid6 operationsLimit=$BN_WS_OPERATIONS_LIMIT
+foxx config /brightid6 appsOperationsLimit=$BN_WS_APPS_OPERATIONS_LIMIT
foxx config /brightid6 development=$BN_DEVELOPMENT
+foxx config /brightid6 peers=$BN_PEERS
foxx upgrade /apply6 /code/foxx/apply6.zip ||
foxx install /apply6 /code/foxx/apply6.zip
diff --git a/web_services/foxx/apply5.zip b/web_services/foxx/apply5.zip
index aca1c504..08389ba4 100644
Binary files a/web_services/foxx/apply5.zip and b/web_services/foxx/apply5.zip differ
diff --git a/web_services/foxx/apply6.zip b/web_services/foxx/apply6.zip
index b766e5fb..1af87a16 100644
Binary files a/web_services/foxx/apply6.zip and b/web_services/foxx/apply6.zip differ
diff --git a/web_services/foxx/brightid/db.js b/web_services/foxx/brightid/db.js
index bac44a0b..ed035faf 100755
--- a/web_services/foxx/brightid/db.js
+++ b/web_services/foxx/brightid/db.js
@@ -618,6 +618,11 @@ function setSigningKey(signingKey, key, timestamp) {
signingKeys: [signingKey],
updateTime: timestamp,
});
+
+ // remove pending invites because they can no longer be decrypted with the new signing key
+ invitationsColl.removeByExample({
+ _from: "users/" + key,
+ });
}
function getSponsorship(appUserId) {
@@ -963,7 +968,7 @@ function sponsorRequestedRecently(op) {
const lastSponsorTimestamp = query`
FOR o in ${operationsColl}
FILTER o.name == "Sponsor"
- AND o.appUserId == ${op.appUserId}
+ AND o.appUserId IN ${[op.appUserId, op.appUserId.toLowerCase()]}
SORT o.timestamp ASC
RETURN o.timestamp
`
@@ -973,6 +978,32 @@ function sponsorRequestedRecently(op) {
return lastSponsorTimestamp && Date.now() - lastSponsorTimestamp < timeWindow;
}
+function getRequiredRecoveryNum(id) {
+ const user = getUser(id);
+ if (
+ "nextRequiredRecoveryNum" in user &&
+ user.requiredRecoveryNumSetAfter <= Date.now()
+ ) {
+ user.requiredRecoveryNum = user.nextRequiredRecoveryNum;
+ delete user.nextRequiredRecoveryNum;
+ delete user.requiredRecoveryNumSetAfter;
+ usersColl.replace(id, user);
+ }
+ return user.requiredRecoveryNum || 2;
+}
+
+function setRequiredRecoveryNum(id, requiredRecoveryNum, timestamp) {
+ const recoveryConnections = getRecoveryConnections(id);
+ if (recoveryConnections.length < requiredRecoveryNum) {
+ throw new errors.InvalidNumberOfSignersError();
+ }
+
+ usersColl.update(id, {
+ nextRequiredRecoveryNum: requiredRecoveryNum,
+ requiredRecoveryNumSetAfter: Date.now() + 7 * 24 * 60 * 60 * 1000,
+ });
+}
+
module.exports = {
connect,
createGroup,
@@ -1017,4 +1048,6 @@ module.exports = {
isEthereumAddress,
getAppUserIds,
sponsorRequestedRecently,
+ setRequiredRecoveryNum,
+ getRequiredRecoveryNum,
};
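
The two helpers above implement a delayed switch-over: "Set Required Recovery Num" stores the new value as nextRequiredRecoveryNum, and getRequiredRecoveryNum only promotes it once requiredRecoveryNumSetAfter has passed, defaulting to 2 signers otherwise. A minimal standalone sketch of that state machine (plain JavaScript; the users collection is replaced by an in-memory object, so this is illustrative rather than the module itself):

const WEEK_MS = 7 * 24 * 60 * 60 * 1000;

// `user` stands in for a users-collection document; persistence is omitted.
function setRequiredRecoveryNum(user, requiredRecoveryNum, recoveryConnectionCount) {
  if (recoveryConnectionCount < requiredRecoveryNum) {
    throw new Error("requiredRecoveryNum exceeds the number of recovery connections");
  }
  user.nextRequiredRecoveryNum = requiredRecoveryNum;
  user.requiredRecoveryNumSetAfter = Date.now() + WEEK_MS;
}

function getRequiredRecoveryNum(user) {
  if ("nextRequiredRecoveryNum" in user && user.requiredRecoveryNumSetAfter <= Date.now()) {
    user.requiredRecoveryNum = user.nextRequiredRecoveryNum;
    delete user.nextRequiredRecoveryNum;
    delete user.requiredRecoveryNumSetAfter;
  }
  return user.requiredRecoveryNum || 2;
}

// The new value only takes effect after the 7-day window.
const user = {};
setRequiredRecoveryNum(user, 3, 3);
console.log(getRequiredRecoveryNum(user)); // 2 (still the default)
user.requiredRecoveryNumSetAfter = Date.now() - 1; // simulate the week having passed
console.log(getRequiredRecoveryNum(user)); // 3
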
diff --git a/web_services/foxx/brightid/errors.js b/web_services/foxx/brightid/errors.js
index cdc132c5..947f0347 100755
--- a/web_services/foxx/brightid/errors.js
+++ b/web_services/foxx/brightid/errors.js
@@ -55,6 +55,8 @@ const CACHED_PARAMS_NOT_FOUND = 65;
const FORBIDDEN_CONNECTION = 66;
const UNSINGABLE_APP_USER_ID = 67;
const SPONSOR_REQUESTED_RECENTLY = 68;
+const WRONG_NUMBER_OF_SIGNERS = 69;
+const INVALID_NUMBER_OF_SIGNERS = 70;
class BrightIDError extends Error {
constructor() {
@@ -581,7 +583,26 @@ class SponsorRequestedRecently extends ForbiddenError {
constructor() {
super();
this.errorNum = SPONSOR_REQUESTED_RECENTLY;
- this.message = `The app has sent this sponsor request recently.`;
+ this.message = "The app has sent this sponsor request recently.";
+ }
+}
+
+class WrongNumberOfSignersError extends ForbiddenError {
+ constructor(missedAttr, requiredRecoveryNum) {
+ super();
+ this.errorNum = WRONG_NUMBER_OF_SIGNERS;
+ this.message = `${missedAttr} is missing while ${requiredRecoveryNum} signers are required.`;
+ this.missedAttr = missedAttr;
+ this.requiredRecoveryNum = requiredRecoveryNum;
+ }
+}
+
+class InvalidNumberOfSignersError extends ForbiddenError {
+ constructor() {
+ super();
+ this.errorNum = INVALID_NUMBER_OF_SIGNERS;
+ this.message =
+ "The number of required signers cannot exceed the number of recovery connections.";
}
}
@@ -642,6 +663,8 @@ module.exports = {
FORBIDDEN_CONNECTION,
UNSINGABLE_APP_USER_ID,
SPONSOR_REQUESTED_RECENTLY,
+ WRONG_NUMBER_OF_SIGNERS,
+ INVALID_NUMBER_OF_SIGNERS,
BrightIDError,
BadRequestError,
InternalServerError,
@@ -704,4 +727,6 @@ module.exports = {
ForbiddenConnectionError,
UnsingableAppUserIdError,
SponsorRequestedRecently,
+ WrongNumberOfSignersError,
+ InvalidNumberOfSignersError,
};
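
Clients can distinguish the two new failure modes by errorNum, which the API returns in the response body (the tests below read it as resp.json.errorNum). A hedged sketch of handling them; the constants mirror the values added above, and the string messages are illustrative:

// Sketch: branching on the new social-recovery error numbers on the client side.
const WRONG_NUMBER_OF_SIGNERS = 69;
const INVALID_NUMBER_OF_SIGNERS = 70;

function explainRecoveryError(body) {
  switch (body.errorNum) {
    case WRONG_NUMBER_OF_SIGNERS:
      // an idN/sigN pair is missing: collect more recovery signatures
      return "More recovery signers are required for this user.";
    case INVALID_NUMBER_OF_SIGNERS:
      // requiredRecoveryNum was larger than the user's recovery connections
      return "requiredRecoveryNum cannot exceed the number of recovery connections.";
    default:
      return "Unhandled error";
  }
}
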
diff --git a/web_services/foxx/brightid/index.js b/web_services/foxx/brightid/index.js
index cd8a0cd9..eed0d613 100755
--- a/web_services/foxx/brightid/index.js
+++ b/web_services/foxx/brightid/index.js
@@ -41,7 +41,6 @@ const handlers = {
const op = req.body;
const message = operations.getMessage(op);
op.hash = hash(message);
-
if (operationsHashesColl.exists(op.hash)) {
throw new errors.OperationAppliedBeforeError(op.hash);
} else if (JSON.stringify(op).length > MAX_OP_SIZE) {
@@ -53,7 +52,9 @@ const handlers = {
// allow limited number of operations to be posted in defined time window
const timeWindow = module.context.configuration.operationsTimeWindow * 1000;
- const limit = module.context.configuration.operationsLimit;
+ const limit = ["Sponsor", "Spend Sponsorship"].includes(op.name)
+ ? module.context.configuration.appsOperationsLimit
+ : module.context.configuration.operationsLimit;
operations.checkLimits(op, timeWindow, limit);
op.state = "init";
@@ -150,6 +151,7 @@ const handlers = {
});
data.createdAt = user.createdAt;
data.signingKeys = user.signingKeys;
+ data.requiredRecoveryNum = db.getRequiredRecoveryNum(id);
if (requestor && usersColl.exists(requestor)) {
const requestorConnections = db.userConnections(requestor, "outbound");
@@ -515,6 +517,13 @@ const handlers = {
},
});
},
+
+ peersGet: function (req, res) {
+ const conf = module.context.configuration;
+ res.send({
+ peers: conf.peers ? conf.peers.split(",") : [],
+ });
+ },
};
router
@@ -830,6 +839,11 @@ router
.response(schemas.sponsorshipGetResponse)
.error(404, "App generated id not found");
+router
+ .get("/peers", handlers.peersGet)
+ .summary("Gets other nodes this node trusts")
+ .response(schemas.peersGetResponse);
+
module.context.use(function (req, res, next) {
try {
next();
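
The new GET /peers route simply splits the comma-separated peers configuration into an array. A hedged client-side sketch of consuming it (Node 18+ global fetch; NODE_URL is a placeholder, and the envelope is read defensively because the handler sends { peers } while peersGetResponse declares { data: { peers } }):

const NODE_URL = "http://localhost/brightid/v6"; // placeholder, not defined in this PR

async function getTrustedPeers() {
  const res = await fetch(`${NODE_URL}/peers`);
  if (!res.ok) {
    throw new Error(`peers request failed with status ${res.status}`);
  }
  const body = await res.json();
  // accept either envelope shape
  return body.data ? body.data.peers : body.peers;
}

getTrustedPeers().then((peers) => console.log("trusted peers:", peers));
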
diff --git a/web_services/foxx/brightid/initdb.js b/web_services/foxx/brightid/initdb.js
index 73e8db5a..96ba16e0 100755
--- a/web_services/foxx/brightid/initdb.js
+++ b/web_services/foxx/brightid/initdb.js
@@ -52,6 +52,8 @@ const indexes = [
{ collection: "operations", fields: ["state"], type: "persistent" },
{ collection: "operations", fields: ["name"], type: "persistent" },
{ collection: "operations", fields: ["timestamp"], type: "persistent" },
+ { collection: "operations", fields: ["contextId"], type: "persistent" },
+ { collection: "operations", fields: ["appUserId"], type: "persistent" },
{
collection: "cachedParams",
fields: ["creationDate"],
diff --git a/web_services/foxx/brightid/manifest.json b/web_services/foxx/brightid/manifest.json
index 0c2a1ada..254bfe92 100755
--- a/web_services/foxx/brightid/manifest.json
+++ b/web_services/foxx/brightid/manifest.json
@@ -3,7 +3,7 @@
"name": "BrightID-Node",
"description": "Read and update the anonymous social graph stored on BrightID nodes.",
"license": "ISC",
- "version": "6.13.1",
+ "version": "6.14.0",
"tests": ["tests/*.js"],
"scripts": {
"setup": "initdb.js"
@@ -44,6 +44,16 @@
"type": "int",
"required": false
},
+ "appsOperationsLimit": {
+ "description": "Maximum number of operations each app can send in configured time window",
+ "type": "int",
+ "required": false
+ },
+ "peers": {
+ "description": "other nodes that this node trusts (comma-separated string)",
+ "type": "string",
+ "required": false
+ },
"development": {
"description": "true if the node is in development mode",
"type": "boolean",
diff --git a/web_services/foxx/brightid/manifest_apply.json b/web_services/foxx/brightid/manifest_apply.json
index ea92eefa..9b5211a3 100644
--- a/web_services/foxx/brightid/manifest_apply.json
+++ b/web_services/foxx/brightid/manifest_apply.json
@@ -2,7 +2,7 @@
"main": "apply.js",
"name": "apply",
"description": "Allows BrightID consensus module to apply operations to the database.",
- "version": "6.13.1",
+ "version": "6.14.0",
"scripts": {
"setup": "initdb.js"
}
diff --git a/web_services/foxx/brightid/operations.js b/web_services/foxx/brightid/operations.js
index df81bf01..abbb64de 100755
--- a/web_services/foxx/brightid/operations.js
+++ b/web_services/foxx/brightid/operations.js
@@ -13,6 +13,7 @@ const errors = require("./errors");
const usersColl = arango._collection("users");
const operationCountersColl = arango._collection("operationCounters");
+const sponsorshipsColl = arango._collection("sponsorships");
const TIME_FUDGE = 60 * 60 * 1000; // timestamp can be this far in the future (milliseconds) to accommodate client/server clock differences
@@ -68,6 +69,7 @@ const senderAttrs = {
"Vouch Family": ["id"],
"Set Family Head": ["id"],
"Convert To Family": ["id"],
+ "Set Required Recovery Num": ["id"],
};
function checkLimits(op, timeWindow, limit) {
@@ -81,6 +83,24 @@ function checkLimits(op, timeWindow, limit) {
// 3) a bucket for all non-verified users without parent
// 4) a bucket for an app
// where parent is the first verified user that make connection with the user
+
+ if (op["name"] == "Spend Sponsorship") {
+ const app = db.getApp(op.app);
+ if (app.idsAsHex) {
+ op.appUserId = op.appUserId.toLowerCase();
+ }
+ const sponsorship = sponsorshipsColl.firstExample({
+ appId: op.appUserId,
+ });
+ if (!sponsorship) {
+ sender = "shared_apps";
+ } else if (sponsorship.spendRequested) {
+ throw new errors.SpendRequestedBeforeError();
+ } else if (!sponsorship.appHasAuthorized) {
+ sender = "shared_apps";
+ }
+ }
+
if (!["Sponsor", "Spend Sponsorship"].includes(op["name"])) {
if (!usersColl.exists(sender)) {
// this happens when operation is "Connect" and sender does not exist
@@ -147,6 +167,7 @@ const signerAndSigs = {
"Vouch Family": ["id", "sig"],
"Set Family Head": ["id", "sig"],
"Convert To Family": ["id", "sig"],
+ "Set Required Recovery Num": ["id", "sig"],
};
function verify(op) {
@@ -168,21 +189,30 @@ function verify(op) {
// there is no sig on this operation
return;
} else if (op.name == "Social Recovery") {
+ const requiredRecoveryNum = db.getRequiredRecoveryNum(op.id);
const recoveryConnections = db.getRecoveryConnections(op.id);
- if (op.id1 == op.id2) {
- throw new errors.DuplicateSignersError();
- }
- const rc1 = recoveryConnections.find((c) => c.id == op.id1);
- const rc2 = recoveryConnections.find((c) => c.id == op.id2);
- if (!rc1 || !rc2) {
- throw new errors.NotRecoveryConnectionsError();
- } else if (rc1.activeAfter != 0) {
- throw new errors.WaitForCooldownError(op.id1);
- } else if (rc2.activeAfter != 0) {
- throw new errors.WaitForCooldownError(op.id2);
+ const temp = new Set();
+ for (let i = 1; i <= requiredRecoveryNum; i++) {
+ if (!(`id${i}` in op)) {
+ throw new errors.WrongNumberOfSignersError(`id${i}`, requiredRecoveryNum);
+ }
+
+ if (temp.has(op[`id${i}`])) {
+ throw new errors.DuplicateSignersError();
+ }
+
+ const rc = recoveryConnections.find((c) => c.id == op[`id${i}`]);
+ if (!rc) {
+ throw new errors.NotRecoveryConnectionsError();
+ }
+
+ if (rc.activeAfter != 0) {
+ throw new errors.WaitForCooldownError(op[`id${i}`]);
+ }
+
+ verifyUserSig(message, op[`id${i}`], op[`sig${i}`]);
+ temp.add(op[`id${i}`]);
}
- verifyUserSig(message, op.id1, op.sig1);
- verifyUserSig(message, op.id2, op.sig2);
} else if (op.name == "Connect") {
verifyUserSig(message, op.id1, op.sig1);
if (op.requestProof) {
@@ -194,6 +224,7 @@ function verify(op) {
const sig = op[sigAttr];
verifyUserSig(message, signer, sig);
}
+
if (hash(message) != op.hash) {
throw new errors.InvalidOperationHashError();
}
@@ -242,6 +273,12 @@ function apply(op) {
return db.setFamilyHead(op.id, op.head, op.group, op.timestamp);
} else if (op["name"] == "Convert To Family") {
return db.convertToFamily(op.id, op.head, op.group, op.timestamp);
+ } else if (op["name"] == "Set Required Recovery Num") {
+ return db.setRequiredRecoveryNum(
+ op.id,
+ op.requiredRecoveryNum,
+ op.timestamp
+ );
} else {
throw new errors.InvalidOperationNameError(op["name"]);
}
@@ -255,6 +292,9 @@ function getMessage(op) {
"sig",
"sig1",
"sig2",
+ "sig3",
+ "sig4",
+ "sig5",
"hash",
"blockTime",
"n",
@@ -263,7 +303,10 @@ function getMessage(op) {
].includes(k)
) {
continue;
- } else if (op.name == "Social Recovery" && ["id1", "id2"].includes(k)) {
+ } else if (
+ op.name == "Social Recovery" &&
+ ["id1", "id2", "id3", "id4", "id5"].includes(k)
+ ) {
continue;
}
signedOp[k] = op[k];
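
verify() now loops over id1..idN (N being the user's required recovery number), rejecting missing or duplicate signers and per-connection cooldowns before checking sig1..sigN, and getMessage() strips id3..id5 and sig3..sig5 the same way it already stripped the first two. A self-contained sketch of how a client could assemble such an operation with three signers, following the pattern in tests/operations.js (the getMessage below is a simplified local stand-in for the module's canonicalization, not the module itself):

const nacl = require("tweetnacl");

const strToUint8Array = (s) => new Uint8Array(Buffer.from(s, "ascii"));
const uInt8ArrayToB64 = (arr) => Buffer.from(arr).toString("base64");

// Simplified stand-in: drop unsigned fields, and id1..id5 for Social Recovery.
function getMessage(op) {
  const skipped = ["sig", "sig1", "sig2", "sig3", "sig4", "sig5", "hash", "blockTime"];
  const signedOp = {};
  for (const k of Object.keys(op)) {
    if (skipped.includes(k)) continue;
    if (op.name === "Social Recovery" && ["id1", "id2", "id3", "id4", "id5"].includes(k)) continue;
    signedOp[k] = op[k];
  }
  return JSON.stringify(signedOp);
}

// Three recovery connections, each with its own ed25519 keypair (placeholders).
const recoveryConnections = [1, 2, 3].map((i) => ({ id: `rc${i}`, keys: nacl.sign.keyPair() }));
const newKeys = nacl.sign.keyPair();

const op = {
  v: 6,
  name: "Social Recovery",
  id: "someBrightId",
  id1: recoveryConnections[0].id,
  id2: recoveryConnections[1].id,
  id3: recoveryConnections[2].id,
  signingKey: uInt8ArrayToB64(newKeys.publicKey),
  timestamp: Date.now(),
};

const message = getMessage(op);
recoveryConnections.forEach((rc, i) => {
  op[`sig${i + 1}`] = uInt8ArrayToB64(nacl.sign.detached(strToUint8Array(message), rc.keys.secretKey));
});
console.log(op);
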
diff --git a/web_services/foxx/brightid/package-lock.json b/web_services/foxx/brightid/package-lock.json
index f44b9d09..b4d71807 100644
--- a/web_services/foxx/brightid/package-lock.json
+++ b/web_services/foxx/brightid/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "brightid-foxx",
- "version": "6.13.1",
+ "version": "6.14.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
diff --git a/web_services/foxx/brightid/package.json b/web_services/foxx/brightid/package.json
index 3ba43bf7..b3d6e303 100755
--- a/web_services/foxx/brightid/package.json
+++ b/web_services/foxx/brightid/package.json
@@ -1,7 +1,7 @@
{
"name": "brightid-foxx",
"description": "Foxx service for managing BrightID connections",
- "version": "6.13.1",
+ "version": "6.14.0",
"dependencies": {
"base64-js": "^1.3.0",
"crypto-js": "^3.1.9-1",
diff --git a/web_services/foxx/brightid/schemas.js b/web_services/foxx/brightid/schemas.js
index 461b6c99..14cc56b2 100755
--- a/web_services/foxx/brightid/schemas.js
+++ b/web_services/foxx/brightid/schemas.js
@@ -138,6 +138,21 @@ const operations = {
.description(
"brightid of a recovery connection of the user represented by id"
),
+ id3: joi
+ .string()
+ .description(
+ "brightid of a recovery connection of the user represented by id"
+ ),
+ id4: joi
+ .string()
+ .description(
+ "brightid of a recovery connection of the user represented by id"
+ ),
+ id5: joi
+ .string()
+ .description(
+ "brightid of a recovery connection of the user represented by id"
+ ),
sig1: joi
.string()
.required()
@@ -150,6 +165,21 @@ const operations = {
.description(
"deterministic json representation of operation object signed by the recovery connection represented by id2"
),
+ sig3: joi
+ .string()
+ .description(
+ "deterministic json representation of operation object signed by the recovery connection represented by id3"
+ ),
+ sig4: joi
+ .string()
+ .description(
+ "deterministic json representation of operation object signed by the recovery connection represented by id4"
+ ),
+ sig5: joi
+ .string()
+ .description(
+ "deterministic json representation of operation object signed by the recovery connection represented by id5"
+ ),
},
Sponsor: {
appUserId: joi
@@ -372,6 +402,27 @@ const operations = {
"deterministic json representation of operation object signed by the head user represented by id"
),
},
+ "Set Required Recovery Num": {
+ id: joi
+ .string()
+ .required()
+ .description(
+ "brightid of the user who is setting the required number of signatures for social recovery"
+ ),
+ requiredRecoveryNum: joi
+ .number()
+ .integer()
+ .greater(1)
+ .less(6)
+ .required()
+ .description("the required number of signatures for social recovery"),
+ sig: joi
+ .string()
+ .required()
+ .description(
+ "deterministic json representation of operation object signed by the user represented by id"
+ ),
+ },
};
Object.keys(operations).forEach((name) => {
@@ -623,6 +674,11 @@ schemas = Object.assign(
.required()
.description("list of recovery connections for the user"),
sponsored: joi.boolean().required().description("if user is sponsored"),
+ requiredRecoveryNum: joi
+ .number()
+ .integer()
+ .required()
+ .description("the required number of signatures for social recovery"),
mutualConnections: joi
.array()
.items(joi.string())
@@ -855,9 +911,7 @@ schemas = Object.assign(
count: joi
.number()
.required()
- .description(
- "the number of app generated ids"
- ),
+ .description("the number of app generated ids"),
})
),
}),
@@ -886,6 +940,16 @@ schemas = Object.assign(
.description("the sponsorship timestamp"),
}),
}),
+
+ peersGetResponse: joi.object({
+ data: joi.object({
+ peers: joi
+ .array()
+ .items(joi.string())
+ .required()
+ .description("list of other nodes that this node trusts"),
+ }),
+ }),
},
schemas
);
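
The new "Set Required Recovery Num" schema constrains requiredRecoveryNum to an integer between 2 and 5 (greater(1).less(6)). A hedged sketch of that validation using standalone joi; the object below lists the fields from the schema plus the name/v/timestamp fields that appear on every operation in the tests, and the sig value is a placeholder:

const joi = require("joi");

const setRequiredRecoveryNumOp = joi.object({
  name: joi.string().valid("Set Required Recovery Num").required(),
  id: joi.string().required(),
  requiredRecoveryNum: joi.number().integer().greater(1).less(6).required(),
  sig: joi.string().required(),
  timestamp: joi.number().required(),
  v: joi.number().valid(6).required(),
});

const valid = setRequiredRecoveryNumOp.validate({
  name: "Set Required Recovery Num",
  id: "someBrightId",
  requiredRecoveryNum: 3,
  sig: "base64-signature-placeholder",
  timestamp: Date.now(),
  v: 6,
});
console.log(valid.error); // undefined: accepted

const invalid = setRequiredRecoveryNumOp.validate({ ...valid.value, requiredRecoveryNum: 6 });
console.log(invalid.error.message); // rejected: must be less than 6
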
diff --git a/web_services/foxx/brightid/tests/connections.js b/web_services/foxx/brightid/tests/connections.js
index 470a1e1a..67c4c278 100755
--- a/web_services/foxx/brightid/tests/connections.js
+++ b/web_services/foxx/brightid/tests/connections.js
@@ -367,7 +367,7 @@ describe("recovery connections", function () {
const recoveryConnections = db.getRecoveryConnections("a", "outbound");
recoveryConnections
.find((c) => c.id == "c")
- .activeBefore.should.be.greaterThan(0);
+ .activeBefore.should.be.least(0);
const activeRecoveryConnection = recoveryConnections
.filter((conn) => {
return conn.isActive;
diff --git a/web_services/foxx/brightid/tests/operations.js b/web_services/foxx/brightid/tests/operations.js
index 225debeb..19967651 100755
--- a/web_services/foxx/brightid/tests/operations.js
+++ b/web_services/foxx/brightid/tests/operations.js
@@ -364,7 +364,7 @@ describe("operations", function () {
.level.should.equal("just met");
});
- it('should be able to "Social Recovery"', function () {
+ it('should be able to "Social Recovery" with 2 signers by default', function () {
connect(u2, u1, "already known");
connect(u3, u1, "already known");
connect(u1, u2, "recovery");
@@ -376,6 +376,84 @@ describe("operations", function () {
id: u1.id,
id1: u2.id,
id2: u3.id,
+ signingKey: u7.signingKey,
+ timestamp,
+ };
+ const message = getMessage(op);
+ op.sig1 = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u2.secretKey))
+ );
+ op.sig2 = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u3.secretKey))
+ );
+ apply(op);
+ usersColl.document(u1.id).signingKeys.should.deep.equal([u7.signingKey]);
+ u1.secretKey = u7.secretKey;
+ });
+
+ it('should be able to "Set Required Recovery Num"', function () {
+ connect(u8, u1, "already known");
+ connect(u1, u8, "recovery");
+ const op = {
+ v: 6,
+ name: "Set Required Recovery Num",
+ id: u1.id,
+ requiredRecoveryNum: 3,
+ timestamp: Date.now(),
+ };
+ const message = getMessage(op);
+ op.sig = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u1.secretKey))
+ );
+ apply(op);
+ const user = usersColl.document(u1.id);
+ user.nextRequiredRecoveryNum.should.equal(3);
+ user.requiredRecoveryNumSetAfter.should.be.at.most(
+ Date.now() + 7 * 24 * 60 * 60 * 1000
+ );
+ });
+
+ it('should not be able to "Social Recovery" by wrong number of signers', function () {
+ // the new 'requiredRecoveryNum' only takes effect after 7 days, so we write it to the user doc manually for this test
+ const user = usersColl.document(u1.id);
+ user.requiredRecoveryNum = user.nextRequiredRecoveryNum;
+ delete user.nextRequiredRecoveryNum;
+ delete user.requiredRecoveryNumSetAfter;
+ usersColl.replace(u1.id, user);
+
+ const op = {
+ v: 6,
+ name: "Social Recovery",
+ id: u1.id,
+ id1: u2.id,
+ id2: u3.id,
+ signingKey: u4.signingKey,
+ timestamp: Date.now(),
+ };
+ const message = getMessage(op);
+ op.sig1 = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u2.secretKey))
+ );
+ op.sig2 = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u3.secretKey))
+ );
+
+ const resp = request.post(`${baseUrl}/operations`, {
+ body: op,
+ json: true,
+ });
+ resp.json.errorNum.should.equal(errors.WRONG_NUMBER_OF_SIGNERS);
+ });
+
+ it('should be able to "Social Recovery" with the required number of signers (3)', function () {
+ const timestamp = Date.now();
+ const op = {
+ v: 6,
+ name: "Social Recovery",
+ id: u1.id,
+ id1: u2.id,
+ id2: u3.id,
+ id3: u8.id,
signingKey: u4.signingKey,
timestamp,
};
@@ -386,6 +464,9 @@ describe("operations", function () {
op.sig2 = uInt8ArrayToB64(
Object.values(nacl.sign.detached(strToUint8Array(message), u3.secretKey))
);
+ op.sig3 = uInt8ArrayToB64(
+ Object.values(nacl.sign.detached(strToUint8Array(message), u8.secretKey))
+ );
apply(op);
usersColl.document(u1.id).signingKeys.should.deep.equal([u4.signingKey]);
u1.secretKey = u4.secretKey;
@@ -847,16 +928,6 @@ describe("operations", function () {
resp1.json.data.spendRequested.should.equal(true);
resp1.json.data.appHasAuthorized.should.equal(false);
- let op2 = {
- name: "Spend Sponsorship",
- appUserId: appUserId.toLowerCase(),
- app: "idchain",
- timestamp: Date.now(),
- v: 6,
- };
- const opRes = apply(op2);
- opRes.json.result.errorNum.should.equal(errors.SPEND_REQUESTED_BEFORE);
-
let op3 = {
name: "Sponsor",
appUserId: appUserId.toLowerCase(),
diff --git a/web_services/foxx/brightid5.zip b/web_services/foxx/brightid5.zip
index 0f2a0d92..f0f00c3c 100644
Binary files a/web_services/foxx/brightid5.zip and b/web_services/foxx/brightid5.zip differ
diff --git a/web_services/foxx/brightid6.zip b/web_services/foxx/brightid6.zip
index 44dbbcd9..1dff28d6 100644
Binary files a/web_services/foxx/brightid6.zip and b/web_services/foxx/brightid6.zip differ
diff --git a/web_services/profile/app.js b/web_services/profile/app.js
index 80ce0ca8..cfe5337f 100644
--- a/web_services/profile/app.js
+++ b/web_services/profile/app.js
@@ -6,6 +6,7 @@ const NodeCache = require("node-cache");
const config = require("./config");
const { renderStats } = require("./stats");
const bn = require("bignum");
+const {TTLExtension, channel_expires_header} = require('./config')
const dataCache = new NodeCache(config.data_cache_config);
const channelCache = new NodeCache(config.channel_config);
@@ -21,6 +22,19 @@ if (config.is_dev) {
});
}
+/* Get remaining time to live of channel in seconds */
+const getRemainingTTL = (channelId) => {
+ // NodeCache.getTtl() actually returns a unix timestamp in ms(!) at which the channel will expire
+ const expirationTime = channelCache.getTtl(channelId);
+ const remainingTTL = expirationTime - Date.now();
+ return Math.floor(remainingTTL/1000)
+}
+
+/* Get expiration timestamp of channel as a unix timestamp (seconds since 1970) */
+const getExpirationTimestamp = (channelId) => {
+ return Math.floor(channelCache.getTtl(channelId)/1000)
+}
+
app.get("/", function (req, res, next) {
res.send("BrightID socket server");
});
@@ -56,7 +70,8 @@ app.post("/upload/:channelId", function (req, res) {
const ttl = requestedTtl || config.defaultTTL;
let channel = channelCache.get(channelId);
- if (!channel) {
+ const channelExisting = !!channel
+ if (!channelExisting) {
// Create new channel.
channel = {
entries: new Map(),
@@ -66,57 +81,57 @@ app.post("/upload/:channelId", function (req, res) {
// save channel in cache with requested TTL
channelCache.set(channelId, channel, ttl);
console.log(`Created new channel ${channelId} with TTL ${channel.ttl}`);
- } else {
- // existing channel. check if this channel was about to expire, but got another upload
- if (channel.entries.size === 0) {
- console.log(
- `Restoring requested TTL ${channel.ttl} for channel ${channelId}`
- );
- channelCache.ttl(channelId, channel.ttl);
- }
}
// Check if there is already data with the provided uuid to prevent duplicates
const existingData = channel.entries.get(uuid);
if (existingData) {
- if (existingData === data) {
- console.log(
- `Received duplicate profile ${uuid} for channel ${channelId}`
- );
- // Workaround for recovery channels: interpret upload of existing data as request to extend TTL of channel
- // TODO: Remove ttl extension when client that knows how to create channels with longer ttl time is released
- channelCache.ttl(channelId, channel.ttl);
- res.status(201).json({ success: true });
- } else {
+ if (existingData !== data) {
// Same UUID but different content? This is scary. Likely client bug. Bail out.
res
- .status(500)
- .json({
- error: `Profile ${uuid} already exists in channel ${channelId} with different data.`,
- });
+ .status(500)
+ .json({
+ error: `Profile ${uuid} already exists in channel ${channelId} with different data.`,
+ })
+ return;
}
- return;
- }
-
- // check channel size
- const entrySize = sizeof(data) + sizeof(uuid);
- const newSize = channel.size + entrySize;
- console.log(
- `channel ${channelId} newSize: ${newSize},\t delta: ${entrySize} bytes`
- );
- if (newSize > config.channel_max_size_bytes) {
- // channel full :-(
- res
- .status(config.channel_limit_response_code)
- .json({ error: config.channel_limit_message });
- return;
+ console.log(
+ `Received duplicate profile ${uuid} for channel ${channelId}`,
+ )
}
- // save data in cache
try {
- channel.entries.set(uuid, data);
- channel.size = newSize;
+ if (!existingData) {
+ // check channel size
+ const entrySize = sizeof(data) + sizeof(uuid);
+ const newSize = channel.size + entrySize;
+ console.log(
+ `channel ${channelId} newSize: ${newSize},\t delta: ${entrySize} bytes`
+ );
+ if (newSize > config.channel_max_size_bytes) {
+ // channel full :-(
+ res
+ .status(config.channel_limit_response_code)
+ .json({ error: config.channel_limit_message });
+ return;
+ }
+
+ // save new data
+ channel.entries.set(uuid, data);
+ channel.size = newSize;
+ }
+
+ // extend channel TTL if necessary
+ if (channelExisting) {
+ const remainingTTL = getRemainingTTL(channelId)
+ if (remainingTTL < TTLExtension) {
+ channelCache.ttl(channelId, TTLExtension)
+ console.log(`Extending TTL of channel ${channelId}. Old: ${remainingTTL} New: ${getRemainingTTL(channelId)}`)
+ }
+ }
+
res.status(201);
+ res.append(channel_expires_header, `${getExpirationTimestamp(channelId)}`)
res.json({ success: true });
} catch (e) {
console.log(err);
@@ -153,6 +168,8 @@ app.get("/download/:channelId/:uuid", function (req, res, next) {
return;
}
+ res.append(channel_expires_header, `${getExpirationTimestamp(channelId)}`)
+
res.json({
data: data,
});
@@ -201,40 +218,11 @@ app.delete("/:channelId/:uuid", function (req, res, next) {
// update channel size
channel.size -= sizeof(data) + sizeof(uuid);
-
console.log(
`Deleted ${uuid} from channel ${channelId}. New size: ${channel.size}`
);
- // handle removing of last entry
- if (channel.entries.size === 0) {
- // if channel is empty size should also be 0. Double-check.
- if (channel.size !== 0) {
- console.warn(
- `Channel size calculation incorrect. This should not happen.`
- );
- channel.size = 0;
- }
-
- // Reduce remaining TTL. Leave a few minutes TTL in case some upload is
- // hanging from a slow connection
- const expirationTime = channelCache.getTtl(channelId); // This actually returns a unix timestamp in ms(!) when channel will expire
- const remainingTTL = expirationTime - Date.now();
- if (remainingTTL > config.finalTTL) {
- console.log(
- `last element removed from channel ${channelId}. Reducing TTL from ${Math.floor(
- remainingTTL / 1000
- )} to ${config.finalTTL} secs.`
- );
- channelCache.ttl(channelId, config.finalTTL);
- } else {
- console.log(
- `last element removed from channel ${channelId}. Remaining TTL: ${remainingTTL}ms.`
- );
- channelCache.ttl(channelId, config.finalTTL);
- }
- }
-
+ res.append(channel_expires_header, `${getExpirationTimestamp(channelId)}`)
res.status(200);
res.json({ success: true });
});
@@ -250,8 +238,8 @@ app.get("/list/:channelId", function (req, res, next) {
// get channel
const channel = channelCache.get(channelId);
if (!channel) {
- // Don't fail when channel is not existing. Instead return empty array
- // res.status(404).json({error: `Channel ${channelId} not found`});
+ // Returning 404 here is a breaking change and should only be enabled after the client update
+ // res.status(404).json({error: `channelId ${channelId} not found`})
res.json({
profileIds: [],
});
@@ -265,8 +253,10 @@ app.get("/list/:channelId", function (req, res, next) {
return;
}
+ res.append(channel_expires_header, `${getExpirationTimestamp(channelId)}`)
+
res.json({
- profileIds: Array.from(channel.entries.keys()), // channel.entries.keys()
+ profileIds: Array.from(channel.entries.keys()),
});
});
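
The profile service now reports a channel's expiry on every successful response via the x-expires header (a unix timestamp in seconds), and uploads into an existing channel top its TTL back up to TTLExtension when it is close to lapsing. A hedged client-side sketch of reading that header (Node 18+ fetch; PROFILE_URL and the channel id are placeholders):

const PROFILE_URL = "http://localhost:3000"; // placeholder

async function listChannel(channelId) {
  const res = await fetch(`${PROFILE_URL}/list/${channelId}`);
  if (res.status === 404) {
    return { profileIds: [], expiresAt: null }; // channel expired or never created
  }
  const expiresHeader = res.headers.get("x-expires"); // seconds since 1970
  const expiresAt = expiresHeader ? new Date(parseInt(expiresHeader, 10) * 1000) : null;
  const { profileIds } = await res.json();
  return { profileIds, expiresAt };
}

listChannel("some-channel-id").then(({ profileIds, expiresAt }) =>
  console.log(profileIds, "channel expires at", expiresAt)
);
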
diff --git a/web_services/profile/config.js b/web_services/profile/config.js
index dbe6ba23..267d869a 100644
--- a/web_services/profile/config.js
+++ b/web_services/profile/config.js
@@ -6,7 +6,7 @@ const port = process.env.BN_WS_PROFILE_SERVICE_PORT || 3000;
const minTTL = 60; // 1 minute
const maxTTL = 60 * 60 * 24; // 24 hours
const defaultTTL = 60 * 15; // 15 minutes
-const finalTTL = 600; // 10 minutes grace period to keep empty channels open
+const TTLExtension = 600; // 10 minutes: top a channel's TTL back up to this when an upload arrives near expiry
/* Cache config for channels */
const channel_config = {
@@ -29,14 +29,13 @@ const notification_service =
? process.env.NOTIFICATION_SERVICE_DEV
: process.env.NOTIFICATION_SERVICE_RELEASE;
-const channel_entry_limit = 30;
-
const channel_max_size_bytes = is_test
? 1024 // 1 kb when running jest tests
: 1024 * 1024 * 20; // 20 MegaByte normally
const channel_limit_response_code = 440;
const channel_limit_message = "Channel full";
+const channel_expires_header = "x-expires";
module.exports = {
is_dev,
@@ -44,12 +43,12 @@ module.exports = {
channel_config,
data_cache_config,
notification_service,
- channel_entry_limit,
channel_max_size_bytes,
channel_limit_response_code,
channel_limit_message,
- finalTTL,
+ channel_expires_header,
minTTL,
maxTTL,
defaultTTL,
+ TTLExtension,
};
diff --git a/web_services/profile/test/customTTL.test.js b/web_services/profile/test/customTTL.test.js
index 901929cd..66618a39 100644
--- a/web_services/profile/test/customTTL.test.js
+++ b/web_services/profile/test/customTTL.test.js
@@ -2,9 +2,9 @@ const { v4: uuidv4 } = require('uuid');
const request = require('supertest')
const app = require('../app')
const config = require('../config')
-const {channel_config} = require('../config')
+const {channel_config, TTLExtension, channel_expires_header} = require('../config')
-jest.setTimeout(90000);
+jest.setTimeout(120000);
describe('Different TTL values', () => {
@@ -52,11 +52,13 @@ describe('Different TTL values', () => {
.send(profile)
.expect(201)
expect(res.body).toHaveProperty('success', true)
+ expect(res.header).toHaveProperty(channel_expires_header)
// channel should now list the expected profile
res = await request(app)
.get(`/list/${channel}`)
.expect(200)
+ expect(res.header).toHaveProperty(channel_expires_header)
let expectedResult = JSON.stringify({profileIds: [ profile.uuid ]})
expect(res.text).toEqual(expectedResult)
@@ -64,12 +66,11 @@ describe('Different TTL values', () => {
const msToExpiration = (requestedTtl + channel_config.checkperiod + 5 ) * 1000
await new Promise((r) => setTimeout(r, msToExpiration));
- // channel list should now be empty
+ // channel list should now result in 404
res = await request(app)
.get(`/list/${channel}`)
- .expect(200)
- expectedResult = JSON.stringify({profileIds: []})
- expect(res.text).toEqual(expectedResult)
+ .expect(404)
+ expect(res.body).toHaveProperty('error', `channelId ${channel} not found`)
})
it('should create a channel with max TTL', async () => {
@@ -79,18 +80,139 @@ describe('Different TTL values', () => {
uuid: uuidv4(),
requestedTtl: config.maxTTL
};
+ const expires = Math.floor((Date.now()/1000 + profile.requestedTtl))
const channel = uuidv4();
let res = await request(app)
.post(`/upload/${channel}`)
.send(profile)
.expect(201)
expect(res.body).toHaveProperty('success', true)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ expect(parseInt(res.header[channel_expires_header])).toEqual(expires)
// channel should now list the expected profile
res = await request(app)
.get(`/list/${channel}`)
.expect(200)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ expect(parseInt(res.header[channel_expires_header])).toEqual(expires)
let expectedResult = JSON.stringify({profileIds: [ profile.uuid ]})
expect(res.text).toEqual(expectedResult)
})
})
+
+describe('Custom expires header', () => {
+ const data = {
+ data: "Some data",
+ uuid: uuidv4(),
+ requestedTtl: 120 // 2 minutes
+ };
+
+ it('should provide x-expires header when creating a channel', async () => {
+ // create channel by uploading profile
+ const channel = uuidv4();
+ let res = await request(app)
+ .post(`/upload/${channel}`)
+ .send(data)
+ .expect(201)
+ const expires = Math.floor((Date.now()/1000 + data.requestedTtl))
+ expect(res.body).toHaveProperty('success', true)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ expect(parseInt(res.header[channel_expires_header])).toEqual(expires)
+ });
+
+ describe('should provide x-expires header', () => {
+ const channel = uuidv4();
+ let expires
+
+ beforeAll(async () => {
+ // create channel by uploading profile
+ const res = await request(app)
+ .post(`/upload/${channel}`)
+ .send(data)
+ .expect(201)
+ expect(res.body).toHaveProperty('success', true)
+ expires = Math.floor((Date.now()/1000 + data.requestedTtl))
+ })
+
+ it('when listing a channel', async () => {
+ const res = await request(app)
+ .get(`/list/${channel}`)
+ .expect(200)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ const returnedExpires = parseInt(res.header[channel_expires_header])
+ expect(returnedExpires).toEqual(expires)
+ });
+
+ it('when downloading an entry', async () => {
+ const res = await request(app)
+ .get(`/download/${channel}/${data.uuid}`)
+ .expect(200)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ const returnedExpires = parseInt(res.header[channel_expires_header])
+ expect(returnedExpires).toEqual(expires)
+ });
+ })
+
+})
+
+describe('TTL extension', () => {
+ it('should extend channel TTL when uploading data', async() => {
+ // create channel with min TTL
+ const requestedTtl = config.minTTL
+ const data = {
+ data: "Profile A Data",
+ uuid: uuidv4(),
+ requestedTtl
+ };
+ const expires = Math.floor((Date.now()/1000 + data.requestedTtl))
+ const channel = uuidv4();
+
+ let res = await request(app)
+ .post(`/upload/${channel}`)
+ .send(data)
+ .expect(201)
+ expect(res.body).toHaveProperty('success', true)
+ expect(res.header).toHaveProperty(channel_expires_header)
+ expect(parseInt(res.header[channel_expires_header])).toEqual(expires)
+
+ // upload additional data
+ const moreData = {
+ data: "More Data",
+ uuid: uuidv4(),
+ };
+ res = await request(app)
+ .post(`/upload/${channel}`)
+ .send(moreData)
+ .expect(201)
+ expect(res.body).toHaveProperty('success', true)
+
+ // channel TTL should now be extended
+ expect(res.header).toHaveProperty(channel_expires_header)
+ let newExpires = parseInt(res.header[channel_expires_header])
+ expect(newExpires).toBeGreaterThan(expires)
+ expect(newExpires).toBeLessThanOrEqual(expires + TTLExtension)
+
+ // wait 2 seconds so the TTL drops below the extension threshold again
+ await new Promise((r) => setTimeout(r, 2000));
+
+ // upload additional data
+ const muchMoreData = {
+ data: "Much more Data",
+ uuid: uuidv4(),
+ };
+ res = await request(app)
+ .post(`/upload/${channel}`)
+ .send(muchMoreData)
+ .expect(201)
+ expect(res.body).toHaveProperty('success', true)
+
+ // channel TTL should now be extended again
+ expect(res.header).toHaveProperty(channel_expires_header)
+ let prevExpires = newExpires
+ newExpires = parseInt(res.header[channel_expires_header])
+ expect(newExpires).toBeGreaterThan(prevExpires)
+ expect(newExpires).toBeLessThanOrEqual(prevExpires + TTLExtension)
+
+ })
+})
diff --git a/web_services/profile/test/duplicateProfiles.test.js b/web_services/profile/test/duplicateProfiles.test.js
index 120d7468..a35ed58a 100644
--- a/web_services/profile/test/duplicateProfiles.test.js
+++ b/web_services/profile/test/duplicateProfiles.test.js
@@ -9,6 +9,8 @@ const profileA = {
const channel = uuidv4();
+jest.setTimeout(120000);
+
describe('duplicate profiles', () => {
beforeAll(async() =>{
diff --git a/web_services/profile/test/invalidChannel.test.js b/web_services/profile/test/invalidChannel.test.js
new file mode 100644
index 00000000..3dc25817
--- /dev/null
+++ b/web_services/profile/test/invalidChannel.test.js
@@ -0,0 +1,20 @@
+const { v4: uuidv4 } = require('uuid');
+const request = require('supertest')
+const app = require('../app')
+
+const profileA = {
+ data: "Profile A Data",
+ uuid: uuidv4(),
+};
+const channel = uuidv4();
+
+describe('Invalid channel', () => {
+
+ it('should return 404 when listing invalid channel', async () => {
+ const res = await request(app)
+ .get(`/list/${channel}`)
+ .expect(404)
+ expect(res.body).toHaveProperty('error', `channelId ${channel} not found`)
+ })
+
+})
diff --git a/web_services/profile/test/remove_content.test.js b/web_services/profile/test/remove_content.test.js
index c2f3b4e3..8b36b3e5 100644
--- a/web_services/profile/test/remove_content.test.js
+++ b/web_services/profile/test/remove_content.test.js
@@ -1,6 +1,7 @@
const { v4: uuidv4 } = require('uuid');
const request = require('supertest')
const app = require('../app')
+const {channel_expires_header} = require('../config')
const setupChannel = async (numEntries) => {
channelId = uuidv4();
@@ -21,8 +22,8 @@ const setupChannel = async (numEntries) => {
.get(`/list/${channelId}`)
.expect(200)
expect(res.body.profileIds).toHaveLength(numEntries);
-
- return {channelId, channelEntries}
+ expect(res.header).toHaveProperty(channel_expires_header)
+ return {channelId, channelEntries, expires: parseInt(res.header[channel_expires_header])}
}
describe('Remove items from channel', () => {
@@ -101,22 +102,24 @@ describe('Remove items from channel', () => {
describe('Delete all entries', () => {
+ let channelExpires;
+
// Setup random channel
beforeAll(async ()=>{
const channelData = await setupChannel(numEntries)
channelId = channelData.channelId
channelEntries = channelData.channelEntries
+ channelExpires = channelData.expires
})
it('should delete all entries', async () => {
+ let newExpires
for (let i=0; i < numEntries; i++) {
const deleteResult = await request(app)
.delete(`/${channelId}/${channelEntries[i].uuid}`)
.expect(200)
}
- })
-
- it('Should return empty channel list', async () => {
+ // Should return empty channel list now
const res = await request(app)
.get(`/list/${channelId}`)
.expect(200)