Refactor contribution serializer and validation #94

Merged
raucao merged 4 commits from bugfix/schema_changes into master 2019-04-12 18:54:57 +00:00
8 changed files with 112 additions and 77 deletions

View File

@ -1,16 +1,18 @@
const contractCalls = [
['Contributor', 'add', [{ account: '0x7e8f313c56f809188313aa274fa67ee58c31515d', name: 'bumi', isCore: true, kind: 'person', url: '', github_username: 'bumi', github_uid: 318, wiki_username: 'bumi' }, { gasLimit: 200000 }]],
['Contributor', 'add', [{ account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc', name: 'raucao', isCore: true, kind: 'person', url: '', github_username: 'skddc', github_uid: 842, wiki_username: 'raucau' }, { gasLimit: 200000 }]],
['Proposal', 'addProposal', [{ contributorId: 1, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
['Contributor', 'add', [{ account: '0x7e8f313c56f809188313aa274fa67ee58c31515d', name: 'bumi', kind: 'person', url: '', github_username: 'bumi', github_uid: 318, wiki_username: 'bumi' }, { gasLimit: 200000 }]],
['Contributor', 'add', [{ account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc', name: 'raucao', kind: 'person', url: '', github_username: 'skddc', github_uid: 842, wiki_username: 'raucau' }, { gasLimit: 200000 }]],
['Proposal', 'addProposal', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-09', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-10', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'vote', [1, { gasLimit: 550000 }]],
['Contribution', 'addContribution', [{ contributorId: 1, amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 2, amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-11', amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'claim', [1, { gasLimit: 300000 }]]
];
const funds = [
'0x7e8f313c56f809188313aa274fa67ee58c31515d',
'0xa502eb4021f3b9ab62f75b57a94e1cfbf81fd827'
];
module.exports = { contractCalls, funds };

View File

@ -1,20 +1,5 @@
const ethers = require('ethers');
const schemas = require('kosmos-schemas');
const tv4 = require('tv4');
const validator = tv4.freshApi();
validator.addFormat({
'date': function(value) {
const dateRegexp = /^[0-9]{4,}-[0-9]{2}-[0-9]{2}$/;
return dateRegexp.test(value) ? null : "A valid ISO 8601 full-date string is expected";
},
'time': function(value) {
const timeRegexp = /^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(([Zz])|([\+|\-]([01][0-9]|2[0-3]):[0-5][0-9]))$/;
return timeRegexp.test(value) ? null : "A valid ISO 8601 full-time string is expected";
}
})
const ContributionSerializer = require('../serializers/contribution');
const Base = require('./base');
@ -62,12 +47,13 @@ class Contribution extends Base {
});
}
addContribution(contributionAttr, callOptions = {}) {
let jsonStr = ContributionSerializer.serialize(contributionAttr);
async addContribution(contributionAttr, callOptions = {}) {
const contribution = new ContributionSerializer(contributionAttr);
// Validate JSON document against schema
let result = validator.validate(JSON.parse(jsonStr), schemas['contribution']);
if (!result) { return Promise.reject(validator.error); }
try { await contribution.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = contribution.serialize();
return this.ipfs
.add(jsonStr)

View File

@ -25,12 +25,16 @@ class Proposal extends Base {
});
}
addProposal(proposalAttr, callOptions = {}) {
let json = ContributionSerializer.serialize(proposalAttr);
// TODO: validate against schema
async addProposal(proposalAttr, callOptions = {}) {
const contribution = new ContributionSerializer(proposalAttr);
try { await contribution.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = contribution.serialize();
return this.ipfs
.add(json)
.add(jsonStr)
.then((ipfsHashAttr) => {
let proposal = [
proposalAttr.contributorId,

View File

@ -1,15 +1,69 @@
const schemas = require('kosmos-schemas');
const validator = require('../utils/validator');
/**
* Handle serialization for JSON-LD object of the contribution, according to
* https://github.com/67P/kosmos-schemas/blob/master/schemas/contribution.json
* Serialization and validation for JSON-LD document of the contribution.
*
* @class
* @public
*/
class Contribution {
constructor(attrs) {
Object.keys(attrs).forEach(a => this[a] = attrs[a]);
}
/**
* Serialize object to JSON
*
* @public
*/
serialize () {
let {
contributorIpfsHash,
date,
time,
kind,
description,
url,
details
} = this;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contribution",
"contributor": {
"ipfs": contributorIpfsHash
},
date,
time,
kind,
description,
"details": details || {}
};
if (url) {
data["url"] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
/**
* Validate serialized data against schema
fsmanuel commented 2019-04-11 11:58:35 +00:00 (Migrated from github.com)

What about:

  serialize (stringify=true) {
    // the current implementation

    return stringify ? JSON.stringify(data, null, 2) : data;
  }

  validate () {
    const serialized = this.serialize(false);
    const valid = validator.validate(serialized, schemas['contribution']);
    return valid ? Promise.resolve() : Promise.reject(validator.error);
  }
raucao commented 2019-04-12 07:07:34 +00:00 (Migrated from github.com)

Let's try to create smaller PRs for suggestions like this, instead of having them hold up larger branches from being merged.

(E.g. personally, I don't like unnamed options, so I'd probably propose using an options object for this. But that's a completely different discussion from whether the schema validation here is implemented well enough in general.)

What do you think?

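For illustration only, the options-object variant mentioned above could look roughly like this (a sketch, not code from this PR; the `stringify` option name is assumed):

```js
// Hypothetical sketch: a named option instead of an unnamed boolean argument
serialize ({ stringify = true } = {}) {
  // ... build `data` exactly as in the current implementation ...
  return stringify ? JSON.stringify(data, null, 2) : data;
}

validate () {
  const serialized = this.serialize({ stringify: false });
  const valid = validator.validate(serialized, schemas['contribution']);
  return valid ? Promise.resolve() : Promise.reject(validator.error);
}
```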
fsmanuel commented 2019-04-12 11:27:10 +00:00 (Migrated from github.com)

Well, I think the initial idea of moving the validation into the serializer was to get rid of the JSON.stringify -> JSON.parse round-trip for the validation. My suggestion focuses on how we can avoid this.
Another thing I don't understand is why you removed the static function and don't use the class at all.

raucao commented 2019-04-12 11:32:57 +00:00 (Migrated from github.com)

> Well, I think the initial idea of moving the validation into the serializer was to get rid of the JSON.stringify -> JSON.parse round-trip for the validation. My suggestion focuses on how we can avoid this.

That's not at all how I understood the idea. I merely refactored everything into shared modules and moved the validation into the class instead of doing it outside.

> Another thing I don't understand is why you removed the static function and don't use the class at all.

Not sure I understand. It's using only the class now, because I added more functions to it. I turned the static function into a prototype function, because that way the class is actually used with its prototype properties.

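In caller terms, the switch from the static function to prototype functions is what the diffs above show; roughly:

```js
// Before: a single static call, schema validation handled by the caller
const jsonStr = ContributionSerializer.serialize(contributionAttr);

// After: instantiate the serializer, validate on the instance, then serialize
const contribution = new ContributionSerializer(contributionAttr);
await contribution.validate();            // rejects with validator.error if the schema check fails
const jsonStr = contribution.serialize(); // pretty-printed JSON for IPFS
```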
raucao commented 2019-04-12 11:33:53 +00:00 (Migrated from github.com)

By the way, in a way I also like validating exactly what's being stored in IPFS. I don't see it as a problem, and I don't see why a refactoring of that should keep us from merging now, as explained. I'm open to good reasons why the code in this PR doesn't work or is so bad it needs to be changed. But frankly, it seems like a waste of time to me. This refactoring wouldn't even affect the code using the class from the outside, let alone clients using the wrapper.

fsmanuel commented 2019-04-12 12:33:24 +00:00 (Migrated from github.com)

I was referring to this: https://github.com/67P/kredits-contracts/pull/92#discussion_r274168386
So my suggestion adds a little to achieve this. I modified the code above to better illustrate the idea. If you disagree, merge it as is.

> Let's try to create smaller PRs for suggestions like this, instead of having them hold up larger branches from being merged.

I think reviews are the place to raise these discussions, aren't they?

fsmanuel commented 2019-04-12 12:35:00 +00:00 (Migrated from github.com)

That being said, I like your implementation!

*
* @public
*/
validate () {
const serialized = JSON.parse(this.serialize());
const valid = validator.validate(serialized, schemas['contribution']);
return valid ? Promise.resolve() : Promise.reject(validator.error);
}
/**
* Deserialize JSON to object
*
* @method
* @public
*/
static deserialize (serialized) {
@ -33,43 +87,6 @@ class Contribution {
};
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
static serialize(deserialized) {
let {
contributorIpfsHash,
date,
time,
kind,
description,
url,
details
} = deserialized;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contribution",
"contributor": {
"ipfs": contributorIpfsHash
},
date,
time,
kind,
description,
"details": details || {}
};
if (url) {
data["url"] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
}
module.exports = Contribution;

lib/utils/validator.js (new file, 15 lines)
View File

@ -0,0 +1,15 @@
const tv4 = require('tv4');
const validator = tv4.freshApi();
validator.addFormat({
'date': function(value) {
const dateRegexp = /^[0-9]{4,}-[0-9]{2}-[0-9]{2}$/;
return dateRegexp.test(value) ? null : "A valid ISO 8601 full-date string is expected";
},
'time': function(value) {
const timeRegexp = /^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(([Zz])|([\+|\-]([01][0-9]|2[0-3]):[0-5][0-9]))$/;
return timeRegexp.test(value) ? null : "A valid ISO 8601 full-time string is expected";
}
})
module.exports = validator;
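For example, the exported validator can be used directly against a kosmos schema like this (a minimal sketch; the relative require path and the `doc` variable are assumed):

```js
const schemas = require('kosmos-schemas');
const validator = require('./lib/utils/validator');

// `doc` is a plain object shaped like the contribution JSON-LD document above
const valid = validator.validate(doc, schemas['contribution']);
if (!valid) {
  console.error(validator.error); // tv4 error object with message and dataPath
}
```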

View File

@ -1,4 +1,5 @@
const promptly = require('promptly');
const { inspect } = require('util');
const initKredits = require('./helpers/init_kredits.js');
@ -25,23 +26,31 @@ module.exports = async function(callback) {
}
console.log(`Creating a proposal for contributor ID #${contributorId} account: ${contributorAccount}`);
[ dateNow, timeNow ] = (new Date()).toISOString().split('T');
let contributionAttributes = {
contributorId,
date: dateNow,
time: timeNow,
amount: await promptly.prompt('Amount: '),
description: await promptly.prompt('Description: '),
kind: await promptly.prompt('Kind: ', { default: 'dev' }),
url: await promptly.prompt('URL: ', { default: '' })
}
const contributorData = await kredits.Contributor.getById(contributorId);
contributionAttributes.contributorIpfsHash = contributorData.ipfsHash;
console.log("\nAdding proposal:");
console.log(contributionAttributes);
kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 }).then((result) => {
kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 })
.then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
}).catch((error) => {
console.log('Failed to create proposal');
callback(error);
callback(inspect(error));
});
}

View File

@ -15,7 +15,7 @@ module.exports = async function(callback) {
console.log(`Using Contribution at: ${kredits.Contribution.contract.address}`);
const table = new Table({
head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?']
head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?', 'IPFS']
})
try {
@ -33,6 +33,7 @@ module.exports = async function(callback) {
confirmed,
c.vetoed,
c.claimed,
c.ipfsHash
])
});

View File

@ -17,7 +17,7 @@ module.exports = async function(callback) {
const table = new Table({
head: ['ID', 'Account', 'Core?', 'Name', 'Balance']
head: ['ID', 'Account', 'Name', 'Core?', 'Balance', 'IPFS']
})
let contributors = await kredits.Contributor.all()
@ -26,9 +26,10 @@ module.exports = async function(callback) {
table.push([
c.id.toString(),
c.account,
c.isCore,
`${c.name}`,
ethers.utils.formatEther(c.balance)
c.isCore,
ethers.utils.formatEther(c.balance),
c.ipfsHash
])
})
console.log(table.toString())