Compare commits

..

2 Commits

SHA1 Message Date
3d0abe9027 hack hack apm 2019-04-11 01:31:47 +02:00
4fb8f1dc86 hack hack apm 2019-04-11 01:30:05 +02:00
52 changed files with 735 additions and 43043 deletions


@@ -1,32 +0,0 @@
module.exports = {
'env': {
'browser': true,
'es6': true,
'node': true
},
'extends': 'eslint:recommended',
'globals': {
'Atomics': 'readonly',
'SharedArrayBuffer': 'readonly'
},
'parserOptions': {
'ecmaVersion': 2018,
'sourceType': 'module'
},
'rules': {
'comma-dangle': ['error', {
arrays: 'always-multiline',
objects: 'always-multiline',
imports: 'never',
exports: 'never',
functions: 'ignore',
}],
'eol-last': ['error', 'always'],
semi: ['error', 'always'],
'space-before-function-paren': ['error', {
anonymous: 'never',
named: 'always',
asyncArrow: 'always',
}],
}
}

.gitignore (vendored, 2 changes)

@@ -4,5 +4,3 @@ node_modules
**/node_modules
.ganache-db
.tm_properties
yarn-error.log
.DS_Store


@@ -1,9 +0,0 @@
{
"extends": [
"solhint:default",
"solhint:recommended"
],
"rules": {
"indent": "2"
}
}


@@ -1,26 +0,0 @@
---
language: node_js
node_js:
- "11"
sudo: false
dist: xenial
cache:
yarn: true
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
- export PATH=$HOME/.yarn/bin:$PATH
install:
- yarn install --no-lockfile --non-interactive
script:
- yarn lint:wrapper
- yarn lint:contract-tests
# - yarn lint:contracts
branches:
only:
- master


@@ -36,7 +36,7 @@ development ganache.
$ npm run devchain (or aragon devchain --port 7545)
- To clear/reset the chain use (e.g. if you run out of funds on your devchain)
+ To clear/reset the chain use:
$ npm run devchain -- --reset (or aragon devchain --port 7545 --reset)
@@ -79,9 +79,9 @@ Kredits DAO independently.
![](docs/kredits-diagram.png)
- A DAO can be deployed using the `scripts/deploy-kit.js` script or with the
- `npm run deploy:dao` command. This deploys a new Kredits DAO, installs the
- latest app versions and sets the required permissions.
+ A DAO can be deployed using the `scripts/deploy-kit.js` script or with the `npm
+ run deploy:dao` command. This deploys a new Kredits DAO, installs the latest
+ app versions and sets the required permissions.
See each app in `/apps/*` for details.
@@ -115,11 +115,11 @@ Script to add a new entries to the contracts using the JS wrapper
$ truffle exec scripts/add-{contributor, contribution, proposal}.js
- ### list-{contributors, contributions, proposals}.js
+ ### list-{contributor, contribution, proposal}.js
List contract entries
- $ truffle exec scripts/list-{contributors, contributions, proposals}.js
+ $ truffle exec scripts/list-{contributor, contribution, proposal}.js
### send-funds.js


@@ -1,12 +0,0 @@
module.exports = {
'globals': {
contract: true,
describe: true,
it: true,
},
rules: {
'no-unused-vars': ['error', {
'argsIgnorePattern': '^_',
}],
}
}


@@ -118,26 +118,6 @@ contract Contribution is AragonApp {
// Custom functions
//
function totalKreditsEarned(bool confirmedOnly) public view returns (uint256 count) {
for (uint32 i = 1; i <= contributionsCount; i++) {
ContributionData memory c = contributions[i];
if (block.number >= c.confirmedAtBlock || !confirmedOnly) {
count += c.amount; // should use safemath
}
}
}
function totalKreditsEarnedByContributor(uint32 contributorId, bool confirmedOnly) public view returns (uint256 count) {
uint256 tokenBalance = ownedContributions[contributorId].length;
for (uint256 i = 0; i < tokenBalance; i++) {
uint32 cId = ownedContributions[contributorId][i];
ContributionData memory c = contributions[cId];
if (block.number >= c.confirmedAtBlock || !confirmedOnly) {
count += c.amount; // should use safemath
}
}
}
function getContribution(uint32 contributionId) public view returns (uint32 id, uint32 contributorId, uint32 amount, bool claimed, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, uint256 confirmedAtBlock, bool exists, bool vetoed) {
id = contributionId;
ContributionData storage c = contributions[id];


@@ -11,10 +11,10 @@
"start": "npm run start:aragon:ipfs", "start": "npm run start:aragon:ipfs",
"start:aragon:ipfs": "aragon run", "start:aragon:ipfs": "aragon run",
"start:aragon:http": "aragon run --http localhost:8001 --http-served-from ./dist", "start:aragon:http": "aragon run --http localhost:8001 --http-served-from ./dist",
"start:app": "", "start:app": "npm run sync-assets && npm run build:script -- --no-minify && parcel serve app/index.html -p 8001 --out-dir dist/ --no-cache",
"test": "aragon contracts test", "test": "aragon contracts test",
"compile": "aragon contracts compile", "compile": "aragon contracts compile",
"sync-assets": "", "sync-assets": "copy-aragon-ui-assets -n aragon-ui ./dist",
"build:app": "", "build:app": "",
"build:script": "", "build:script": "",
"build": "", "build": "",


@@ -1,5 +1,5 @@
- // const Contribution = artifacts.require('Contribution.sol');
- contract('Contribution', (_accounts) => {
- it('should be tested');
- });
+ const CounterApp = artifacts.require('Contribution.sol')
+ contract('Contribution', (accounts) => {
+ it('should be tested')
+ })

File diff suppressed because it is too large


@@ -6,10 +6,6 @@ import "@aragon/os/contracts/kernel/IKernel.sol";
interface ITokenBalance {
function balanceOf(address contributorAccount) public view returns (uint256);
}
interface IContributionBalance {
function totalKreditsEarnedByContributor(uint32 contributorId, bool confirmedOnly) public view returns (uint256 count);
function balanceOf(address owner) public view returns (uint256);
}
contract Contributor is AragonApp {
bytes32 public constant KERNEL_APP_ADDR_NAMESPACE = 0xd6f028ca0e8edb4a8c9757ca4fdccab25fa1e0317da1188108f7d2dee14902fb;
@@ -47,12 +43,6 @@ contract Contributor is AragonApp {
return k.getApp(KERNEL_APP_ADDR_NAMESPACE, appIds[uint8(Apps.Token)]);
}
function getContributionContract() public view returns (address) {
IKernel k = IKernel(kernel());
return k.getApp(KERNEL_APP_ADDR_NAMESPACE, appIds[uint8(Apps.Contribution)]);
}
function coreContributorsCount() view public returns (uint32) {
uint32 count = 0;
for (uint32 i = 1; i <= contributorsCount; i++) {
@@ -128,7 +118,7 @@ contract Contributor is AragonApp {
return contributors[id];
}
- function getContributorById(uint32 _id) public view returns (uint32 id, address account, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, bool isCore, uint256 balance, uint256 totalKreditsEarned, uint256 contributionsCount, bool exists ) {
+ function getContributorById(uint32 _id) public view returns (uint32 id, address account, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, bool isCore, uint256 balance, bool exists ) {
id = _id;
Contributor storage c = contributors[_id];
account = c.account;
@@ -138,9 +128,6 @@ contract Contributor is AragonApp {
isCore = isCoreTeam(id);
address token = getTokenContract();
balance = ITokenBalance(token).balanceOf(c.account);
address contribution = getContributionContract();
totalKreditsEarned = IContributionBalance(contribution).totalKreditsEarnedByContributor(_id, true);
contributionsCount = IContributionBalance(contribution).balanceOf(c.account);
exists = c.exists;
}


@@ -1,5 +1,5 @@
- // const Contributor = artifacts.require('Contributor.sol');
- contract('Contributor', (_accounts) => {
- it('should be tested');
- });
+ const CounterApp = artifacts.require('CounterApp.sol')
+ contract('CounterApp', (accounts) => {
+ it('should be tested')
+ })

File diff suppressed because it is too large


@@ -1,4 +1,4 @@
{
"name": "Proposal",
- "description": "Kredits Proposal app"
+ "description": "Kredits proposal app"
}


@@ -1,5 +0,0 @@
// const Proposal = artifacts.require('Proposal.sol');
contract('Proposal', (_accounts) => {
it('should be tested');
});

File diff suppressed because it is too large


@@ -1,4 +1,4 @@
{
"name": "Token",
- "description": "Kredits Token app"
+ "description": "Kredits token app"
}


@@ -1,5 +1,5 @@
- // const Token = artifacts.require('Token.sol');
- contract('Token', (_accounts) => {
- it('should be tested');
- });
+ const CounterApp = artifacts.require('CounterApp.sol')
+ contract('CounterApp', (accounts) => {
+ it('should be tested')
+ })

File diff suppressed because it is too large


@@ -1,48 +1,16 @@
const contractCalls = [
- ['Contributor', 'add', [{
- account: '0x7e8f313c56f809188313aa274fa67ee58c31515d',
- name: 'bumi',
- kind: 'person',
- url: '',
- github_username: 'bumi',
- github_uid: 318,
- gitea_username: 'bumi',
- wiki_username: 'Bumi'
- }, { gasLimit: 200000 }]],
- ['Contributor', 'add', [{
- account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc',
- name: 'raucao',
- kind: 'person',
- url: '',
- github_username: 'skddc',
- github_uid: 842,
- gitea_username: 'raucao',
- wiki_username: 'Basti'
- }, { gasLimit: 200000 }]],
- ['Contributor', 'add', [{
- account: '0xF722709ECC3B05c19d02E82a2a4A4021B8F48C62',
- name: 'Manuel',
- kind: 'person',
- url: '',
- github_username: 'fsmanuel',
- github_uid: 54812,
- wiki_username: 'Manuel'
- }, { gasLimit: 200000 }]],
- ['Proposal', 'addProposal', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-09', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
- ['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-10', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
- ['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
+ ['Contributor', 'add', [{ account: '0x7e8f313c56f809188313aa274fa67ee58c31515d', name: 'bumi', isCore: true, kind: 'person', url: '', github_username: 'bumi', github_uid: 318, wiki_username: 'bumi' }, { gasLimit: 200000 }]],
+ ['Contributor', 'add', [{ account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc', name: 'raucao', isCore: true, kind: 'person', url: '', github_username: 'skddc', github_uid: 842, wiki_username: 'raucau' }, { gasLimit: 200000 }]],
+ ['Proposal', 'addProposal', [{ contributorId: 1, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
+ ['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
+ ['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'vote', [1, { gasLimit: 550000 }]],
- ['Contribution', 'addContribution', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-11', amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
+ ['Contribution', 'addContribution', [{ contributorId: 1, amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
- ['Contribution', 'addContribution', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
+ ['Contribution', 'addContribution', [{ contributorId: 2, amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'claim', [1, { gasLimit: 300000 }]]
];
const funds = [
'0x7e8f313c56f809188313aa274fa67ee58c31515d',
'0xa502eb4021f3b9ab62f75b57a94e1cfbf81fd827'
];
module.exports = { contractCalls, funds };


@@ -1,14 +1,5 @@
# Contribution deployments
aragon apm publish major --environment=rinkeby"
## 2019-04-24 update balances
✔ Successfully published kredits-contribution.open.aragonpm.eth v6.0.0:
Contract address: 0x2c083EEA83fd3a99C93759D97D0317A43261c758
Content (ipfs): QmULpSqz7BgTFmDu8AL7YZZEz525xkcEzf3dPKtbRdUtFs
Transaction hash: 0x8b01c4c00162e918659d267a2beaf33b578e2aaf9f427f1aa9a43029333c5cd7
## 2019-04-10 - Weltempfänger release
✔ Successfully published kredits-contribution.open.aragonpm.eth v5.0.0:


@@ -1,15 +1,5 @@
# Contributor deployments
aragon apm publish major --environment=rinkeby
## 2019-04-29 update balances
✔ Successfully published kredits-contributor.open.aragonpm.eth v5.0.0:
Contract address: 0xadefa3b66b68a127Fe38bEa1813b844EE69CFD86
Content (ipfs): QmeygbQgoj2McLWzo9hJayLWuBZqFaK4HTpa5qLeQdkn5K
Transaction hash: 0x4237a9636f6e4a8190e0d5bcfa85a452da097bf654a173a88e0e1de3d078f08d
## 2019-04-10 - Weltempfänger release
✔ Successfully published kredits-contributor.open.aragonpm.eth v4.0.0:


@@ -1,19 +1,5 @@
# Kredits deployment
## 2019-04-24 upgrade contributor and contribution
aragon dao upgrade 0xc34edf7d11b7f8433d597f0bb0697acdff55ef14 kredits-contributor.open.aragonpm.eth --environment=rinkeby
eth-provider | Invalid provider preset/location: "local"
✔ Fetching kredits-contributor.open.aragonpm.eth@latest
✔ Upgrading app
✔ Successfully executed: "Set the resolving address of 'kredits-contributor.open.aragonpm.eth' in namespace 'App code' to 0xadefa3b66b68a127Fe38bEa1813b844EE69CFD86"
aragon dao upgrade 0xc34edf7d11b7f8433d597f0bb0697acdff55ef14 kredits-contribution.open.aragonpm.eth --environment=rinkeby
✔ Fetching kredits-contribution.open.aragonpm.eth@latest
✔ Upgrading app
✔ Successfully executed: "Set the resolving address of 'kredits-contribution.open.aragonpm.eth' in namespace 'App code' to 0x2c083EEA83fd3a99C93759D97D0317A43261c758"
## 2019-04-10 - Weltempfänger release
Using KreditsKit at: 0x76e069b47b79442657eaf0555a32c6b16fa1b8b4

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -2,15 +2,11 @@ const Base = require('./base');
const EthersUtils = require('ethers').utils;
class Acl extends Base {
hasPermission(fromAddress, contractAddress, roleID, params = null) {
let roleHash = EthersUtils.keccak256(EthersUtils.toUtf8Bytes(roleID));
+ console.log(roleHash)
- return this.functions.hasPermission(
- fromAddress,
- contractAddress,
- roleHash,
- params
- );
+ return this.functions.hasPermission(fromAddress, contractAddress, roleHash, params);
}
}
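For context, the hasPermission wrapper above hashes the human-readable role ID before querying the ACL. A minimal sketch of that hashing step, using only the ethers v4 utilities this file already requires (the role name below is a hypothetical example, not taken from the contracts):

const EthersUtils = require('ethers').utils;

// keccak256 of the UTF-8 role name, as done in Acl#hasPermission above
const roleID = 'ADD_CONTRIBUTOR_ROLE'; // hypothetical role name, for illustration only
const roleHash = EthersUtils.keccak256(EthersUtils.toUtf8Bytes(roleID));
console.log(roleHash); // 0x-prefixed 32-byte hash that is passed to the on-chain hasPermission call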


@@ -20,5 +20,4 @@ class Base {
return this.contract.on(type, callback);
}
}
module.exports = Base;


@@ -1,9 +1,21 @@
- const Record = require('./record');
- const ContributionSerializer = require('../serializers/contribution');
- class Contribution extends Record {
- get count () {
- return this.functions.contributionsCount();
+ const ethers = require('ethers');
+ const ContributionSerializer = require('../serializers/contribution');
+ const Base = require('./base');
+ class Contribution extends Base {
+ all() {
+ return this.functions.contributionsCount()
+ .then(async (count) => {
+ let contributions = [];
+ for (let id = 1; id <= count; id++) {
+ const contribution = await this.getById(id)
+ contributions.push(contribution);
+ }
+ return contributions;
+ });
}
getById(id) {
@@ -11,6 +23,7 @@ class Contribution extends Record {
.then(data => {
return this.ipfs.catAndMerge(data, ContributionSerializer.deserialize);
});
}
getByContributorId(contributorId) {
@@ -34,17 +47,13 @@ class Contribution extends Record {
});
}
- async addContribution (contributionAttr, callOptions = {}) {
- const contribution = new ContributionSerializer(contributionAttr);
- try { await contribution.validate(); }
- catch (error) { return Promise.reject(error); }
- const jsonStr = contribution.serialize();
+ addContribution(contributionAttr, callOptions = {}) {
+ let json = ContributionSerializer.serialize(contributionAttr);
+ // TODO: validate against schema
return this.ipfs
- .add(jsonStr)
+ .add(json)
- .then(ipfsHashAttr => {
+ .then((ipfsHashAttr) => {
let contribution = [
contributionAttr.amount,
contributionAttr.contributorId,


@@ -1,16 +1,27 @@
- const Record = require('./record');
- const ContributorSerializer = require('../serializers/contributor');
- const formatKredits = require('../utils/format-kredits');
- class Contributor extends Record {
- get count () {
- return this.functions.contributorsCount();
+ const ethers = require('ethers');
+ const RSVP = require('rsvp');
+ const ContributorSerializer = require('../serializers/contributor');
+ const Base = require('./base');
+ class Contributor extends Base {
+ all() {
+ return this.functions.contributorsCount()
+ .then(count => {
+ let contributors = [];
+ for (let id = 1; id <= count; id++) {
+ contributors.push(this.getById(id));
+ }
+ return RSVP.all(contributors);
+ });
}
getById(id) {
return this.functions.getContributorById(id)
- .then(data => {
- data.balanceInt = formatKredits(data.balance);
+ // Fetch IPFS data if available
+ .then((data) => {
return this.ipfs.catAndMerge(data, ContributorSerializer.deserialize);
});
}
@@ -39,16 +50,12 @@ class Contributor extends Record {
});
}
- async add (contributorAttr, callOptions = {}) {
- let contributor = new ContributorSerializer(contributorAttr);
- try { await contributor.validate(); }
- catch (error) { return Promise.reject(error); }
- const jsonStr = contributor.serialize();
+ add(contributorAttr, callOptions = {}) {
+ let json = ContributorSerializer.serialize(contributorAttr);
+ // TODO: validate against schema
return this.ipfs
- .add(jsonStr)
+ .add(json)
.then((ipfsHashAttr) => {
let contributor = [
contributorAttr.account,
updateProfile (contributorId, updateAttr, callOptions = {}) {
return this.getById(contributorId).then(async (contributor) => {
let updatedContributorAttr = Object.assign(contributor, updateAttr);
let updatedContributor = new ContributorSerializer(updatedContributorAttr);
try { await updatedContributor.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = updatedContributor.serialize();
return this.ipfs
.add(jsonStr)
.then(ipfsHashAttr => {
return this.functions.updateContributorProfileHash(
contributorId,
ipfsHashAttr.hashDigest,
ipfsHashAttr.hashFunction,
ipfsHashAttr.hashSize,
callOptions
);
});
});
}
}
module.exports = Contributor;
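A hedged usage sketch of the wrapper method shown above, assuming an initialized kredits instance (see lib/kredits.js below); the attribute values are taken from the seeds file in this compare:

// assumes `kredits` was created via new Kredits(provider, signer).init()
kredits.Contributor.add({
  account: '0x7e8f313c56f809188313aa274fa67ee58c31515d',
  name: 'bumi',
  kind: 'person',
  url: '',
  github_username: 'bumi',
  github_uid: 318,
  wiki_username: 'bumi'
}, { gasLimit: 200000 })
  .then(transaction => console.log(transaction.hash)) // ethers v4 returns a transaction response
  .catch(error => console.log('Failed to add contributor', error));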


@@ -4,5 +4,5 @@ module.exports = {
Proposal: require('./proposal'),
Token: require('./token'),
Kernel: require('./kernel'),
- Acl: require('./acl'),
+ Acl: require('./acl')
};


@@ -1,9 +1,21 @@
- const Record = require('./record');
- const ContributionSerializer = require('../serializers/contribution');
- class Proposal extends Record {
- get count () {
- return this.functions.proposalsCount();
+ const ethers = require('ethers');
+ const RSVP = require('rsvp');
+ const ContributionSerializer = require('../serializers/contribution');
+ const Base = require('./base');
+ class Proposal extends Base {
+ all() {
+ return this.functions.proposalsCount()
+ .then(count => {
+ let proposals = [];
+ for (let id = 1; id <= count; id++) {
+ proposals.push(this.getById(id));
+ }
+ return RSVP.all(proposals);
+ });
}
getById(id) {
@@ -13,16 +25,12 @@ class Proposal extends Record {
});
}
- async addProposal (proposalAttr, callOptions = {}) {
- const contribution = new ContributionSerializer(proposalAttr);
- try { await contribution.validate(); }
- catch (error) { return Promise.reject(error); }
- const jsonStr = contribution.serialize();
+ addProposal(proposalAttr, callOptions = {}) {
+ let json = ContributionSerializer.serialize(proposalAttr);
+ // TODO: validate against schema
return this.ipfs
- .add(jsonStr)
+ .add(json)
.then((ipfsHashAttr) => {
let proposal = [
proposalAttr.contributorId,
@@ -37,4 +45,4 @@ class Proposal extends Record {
}
}
- module.exports = Proposal;
+ module.exports = Proposal


@@ -1,14 +0,0 @@
const Base = require('./base');
const paged = require('../utils/pagination');
class Record extends Base {
all (options = {}) {
return this.count
.then((count) => {
let records = paged(count, options).map((id) => this.getById(id));
return Promise.all(records);
});
}
}
module.exports = Record;


@@ -4,3 +4,4 @@ class Token extends Base {
}
module.exports = Token;


@@ -1,7 +1,7 @@
const ethers = require('ethers');
+ const RSVP = require('rsvp');
const Preflight = require('./utils/preflight');
- const deprecate = require('./utils/deprecate');
const ABIS = {
Contributor: require('./abis/Contributor.json'),
@@ -9,19 +9,19 @@ const ABIS = {
Token: require('./abis/Token.json'),
Proposal: require('./abis/Proposal.json'),
Kernel: require('./abis/Kernel.json'),
- Acl: require('./abis/ACL.json'),
+ Acl: require('./abis/ACL.json')
};
const APP_CONTRACTS = [
'Contributor',
'Contribution',
'Token',
'Proposal',
- 'Acl',
+ 'Acl'
];
const DaoAddresses = require('./addresses/dao.json');
const Contracts = require('./contracts');
- const IPFS = require('./utils/ipfs');
+ const IPFS = require('./utils/ipfs')
// Helpers
function capitalize(word) {
@@ -41,31 +41,28 @@ class Kredits {
this.abis = abis || ABIS;
this.ipfs = new IPFS(ipfsConfig);
this.contracts = {};
- this.networkId = null;
}
init(names) {
let contractsToLoad = names || APP_CONTRACTS;
return this.provider.getNetwork().then(network => {
- this.networkId = network.chainId.toString();
- this.addresses['Kernel'] = this.addresses['Kernel'] || DaoAddresses[this.networkId];
+ this.addresses['Kernel'] = this.addresses['Kernel'] || DaoAddresses[network.chainId.toString()];
let addressPromises = contractsToLoad.map((contractName) => {
return this.Kernel.getApp(contractName).then((address) => {
this.addresses[contractName] = address;
}).catch((error) => {
- console.log(error);
throw new Error(`Failed to get address for ${contractName} from DAO at ${this.Kernel.contract.address}
- ${error.message}`
);
});
});
- return Promise.all(addressPromises).then(() => { return this; });
+ return RSVP.all(addressPromises).then(() => { return this });
});
}
static setup(provider, signer, ipfsConfig = null) {
- deprecate('Kredits.setup() is deprecated use new Kredits().init() instead');
+ console.log('Kredits.setup() is deprecated use new Kredits().init() instead');
return new Kredits(provider, signer, { ipfsConfig: ipfsConfig }).init();
}
@@ -83,7 +80,7 @@ class Kredits {
}
get Contributors() {
- deprecate('Contributors is deprecated use Contributor instead');
+ console.log('Contributors is deprecated use Contributor instead');
return this.Contributor;
}
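For orientation, a minimal initialization sketch based on the constructor and init() flow shown above. The local RPC and IPFS endpoints are assumptions (the devchain port comes from this repo's README and package.json), and the require path follows the "main" entry in package.json:

const ethers = require('ethers');
const Kredits = require('./lib/kredits'); // package "main" per package.json

const provider = new ethers.providers.JsonRpcProvider('http://localhost:7545'); // devchain port used in this repo
const signer = provider.getSigner();

new Kredits(provider, signer, { ipfsConfig: { host: 'localhost', port: '5001', protocol: 'http' } })
  .init() // resolves each app's contract address through the DAO Kernel
  .then(kredits => kredits.Contributor.all())
  .then(contributors => console.log(`${contributors.length} contributors`))
  .catch(error => console.log(error.message));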


@@ -1,75 +1,19 @@
const schemas = require('kosmos-schemas');
const validator = require('../utils/validator');
/**
- * Serialization and validation for JSON-LD document of the contribution.
+ * Handle serialization for JSON-LD object of the contribution, according to
+ * https://github.com/67P/kosmos-schemas/blob/master/schemas/contribution.json
*
* @class
* @public
*/
class Contribution {
constructor (attrs) {
Object.keys(attrs).forEach(a => this[a] = attrs[a]);
}
/**
* Serialize object to JSON
*
* @public
*/
serialize () {
let {
contributorIpfsHash,
date,
time,
kind,
description,
url,
details,
} = this;
let data = {
'@context': 'https://schema.kosmos.org',
'@type': 'Contribution',
'contributor': {
'ipfs': contributorIpfsHash,
},
date,
time,
kind,
description,
'details': details || {},
};
if (url) {
data['url'] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
/**
* Validate serialized data against schema
*
* @public
*/
validate () {
const serialized = JSON.parse(this.serialize());
const valid = validator.validate(serialized, schemas['contribution']);
return valid ? Promise.resolve() : Promise.reject(validator.error);
}
/**
* Deserialize JSON to object
*
* @method
* @public
*/
static deserialize(serialized) {
let {
- date,
- time,
kind,
description,
details,
@@ -77,8 +21,6 @@ class Contribution {
} = JSON.parse(serialized.toString('utf8'));
return {
- date,
- time,
kind,
description,
details,
@@ -87,6 +29,39 @@ class Contribution {
};
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
static serialize(deserialized) {
let {
contributorIpfsHash,
kind,
description,
url,
details
} = deserialized;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contribution",
"contributor": {
"ipfs": contributorIpfsHash
},
kind,
description,
"details": details || {}
};
if (url) {
data["url"] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
}
module.exports = Contribution;
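A small round-trip sketch of the static helpers shown above; the attribute values and the require path are illustrative assumptions (the IPFS hash is one that appears in the seeds file):

const ContributionSerializer = require('./lib/serializers/contribution');

const json = ContributionSerializer.serialize({
  contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', // example hash from the seeds file
  kind: 'dev',
  description: 'Reviewed stuff',
  url: '',
  details: {}
});

// deserialize expects the raw IPFS buffer and returns plain attributes
const attributes = ContributionSerializer.deserialize(Buffer.from(json));
console.log(attributes.kind, attributes.description);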


@@ -1,5 +1,3 @@
const schemas = require('kosmos-schemas');
const validator = require('../utils/validator');
/**
* Handle serialization for JSON-LD object of the contributor, according to
* https://github.com/67P/kosmos-schemas/blob/master/schemas/contributor.json
@@ -8,80 +6,6 @@ const validator = require('../utils/validator');
* @public
*/
class Contributor {
constructor (attrs) {
Object.keys(attrs).forEach(a => this[a] = attrs[a]);
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
serialize () {
let {
name,
kind,
url,
github_uid,
github_username,
gitea_username,
wiki_username,
} = this;
let data = {
'@context': 'https://schema.kosmos.org',
'@type': 'Contributor',
kind,
name,
'accounts': [],
};
if (url) {
data['url'] = url;
}
if (github_uid) {
data.accounts.push({
'site': 'github.com',
'uid': github_uid,
'username': github_username,
'url': `https://github.com/${github_username}`,
});
}
if (gitea_username) {
data.accounts.push({
'site': 'gitea.kosmos.org',
'username': gitea_username,
'url': `https://gitea.kosmos.org/${gitea_username}`,
});
}
if (wiki_username) {
data.accounts.push({
'site': 'wiki.kosmos.org',
'username': wiki_username,
'url': `https://wiki.kosmos.org/User:${wiki_username}`,
});
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
/**
* Validate serialized data against schema
*
* @public
*/
validate () {
const serialized = JSON.parse(this.serialize());
const valid = validator.validate(serialized, schemas['contributor']);
return valid ? Promise.resolve() : Promise.reject(validator.error);
}
/**
* Deserialize JSON to object
*
@@ -96,17 +20,13 @@ class Contributor {
accounts,
} = JSON.parse(serialized.toString('utf8'));
- let github_username, github_uid, gitea_username, wiki_username;
- let github = accounts.find(a => a.site === 'github.com');
- let gitea = accounts.find(a => a.site === 'gitea.kosmos.org');
- let wiki = accounts.find(a => a.site === 'wiki.kosmos.org');
+ let github_username, github_uid, wiki_username;
+ let github = accounts.find((a) => a.site === 'github.com');
+ let wiki = accounts.find((a) => a.site === 'wiki.kosmos.org');
if (github) {
(({ username: github_username, uid: github_uid} = github));
}
- if (gitea) {
- (({ username: gitea_username } = gitea));
- }
if (wiki) {
(({ username: wiki_username } = wiki));
}
@@ -118,12 +38,59 @@ class Contributor {
accounts,
github_uid,
github_username,
- gitea_username,
wiki_username,
ipfsData: serialized,
};
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
static serialize(deserialized) {
let {
name,
kind,
url,
github_uid,
github_username,
wiki_username,
} = deserialized;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contributor",
kind,
name,
"accounts": []
};
if (url) {
data["url"] = url;
}
if (github_uid) {
data.accounts.push({
"site": "github.com",
"uid": github_uid,
"username": github_username,
"url": `https://github.com/${github_username}`
});
}
if (wiki_username) {
data.accounts.push({
"site": "wiki.kosmos.org",
"username": wiki_username,
"url": `https://wiki.kosmos.org/User:${wiki_username}`
});
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
}
module.exports = Contributor;


@@ -1,5 +0,0 @@
/*eslint no-console: ["error", { allow: ["warn"] }] */
module.exports = function deprecate (msg) {
console.warn(msg);
};


@@ -1,10 +0,0 @@
const ethersUtils = require('ethers').utils;
module.exports = function(value, options = {}) {
let etherValue = ethersUtils.formatEther(value);
if (options.asFloat) {
return parseFloat(etherValue);
} else {
return parseInt(etherValue);
}
};
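The helper above only wraps ethers' formatEther; a standalone sketch of the same conversion (the sample values are assumptions, representing 5000 and 5000.5 kredits in 18-decimal token units):

const ethersUtils = require('ethers').utils;

const formatKredits = (value, options = {}) => {
  const etherValue = ethersUtils.formatEther(value); // 18-decimal token units -> decimal string
  return options.asFloat ? parseFloat(etherValue) : parseInt(etherValue);
};

console.log(formatKredits('5000000000000000000000'));                    // 5000
console.log(formatKredits('5000500000000000000000', { asFloat: true })); // 5000.5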


@@ -2,6 +2,7 @@ const ipfsClient = require('ipfs-http-client');
const multihashes = require('multihashes');
class IPFS {
constructor(config) {
if (!config) {
config = { host: 'localhost', port: '5001', protocol: 'http' };
@@ -10,14 +11,6 @@ class IPFS {
this._config = config;
}
get config () {
return this._config;
}
get peerId () {
return this._ipfsAPI.id();
}
catAndMerge(data, deserialize) {
// if no hash details are found simply return the data; nothing to merge
if (!data.hashSize || data.hashSize === 0) {
@@ -55,7 +48,7 @@ class IPFS {
hashDigest: '0x' + multihashes.toHexString(multihash.digest),
hashSize: multihash.length,
hashFunction: multihash.code,
- ipfsHash: ipfsHash,
+ ipfsHash: ipfsHash
};
}
@@ -63,6 +56,7 @@ class IPFS {
let digest = ipfsClient.Buffer.from(hashData.hashDigest.slice(2), 'hex');
return multihashes.encode(digest, hashData.hashFunction, hashData.hashSize);
}
}
module.exports = IPFS;


@@ -1,46 +0,0 @@
function pageNumber (number, size, recordCount) {
let numberOfPages = Math.ceil(recordCount / size);
number = parseInt(number) || 1;
// Ensure page number is in range
number = number < 1 ? 1 : number;
number = number > numberOfPages ? numberOfPages : number;
return number;
}
function buildIds (order, number, size, recordCount) {
let offset = size * (number - 1);
let start;
let mapFunction;
if (order === 'asc') {
start = 1 + offset;
mapFunction = (_, i) => start + i;
} else {
start = recordCount - offset;
mapFunction = (_, i) => start - i;
}
// Ensure size is in range
let end = offset + size;
if (end > recordCount) {
let diff = end - recordCount;
size = size - diff;
}
return Array.from({ length: size }, mapFunction);
}
module.exports = function paged (recordCount, options = {}) {
let { order, page } = options;
order = order || 'desc';
page = page || {};
let size = parseInt(page.size) || 25;
let number = pageNumber(page.number, size, recordCount);
return buildIds(order, number, size, recordCount);
};
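A usage sketch of the paged helper above, assuming it is required from lib/utils/pagination; the record counts and page options are illustrative:

const paged = require('./lib/utils/pagination');

// 60 records, newest first (default order 'desc', default page size 25)
console.log(paged(60));                                       // [60, 59, ..., 36]
console.log(paged(60, { page: { number: 3, size: 25 } }));    // [10, 9, ..., 1] (last, short page)
console.log(paged(60, { order: 'asc', page: { size: 10 } })); // [1, 2, ..., 10]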


@@ -4,28 +4,22 @@ class Preflight {
}
check() {
- return this.kredits.ipfs.peerId()
+ return this.kredits.ipfs._ipfsAPI.id()
.catch((error) => {
- const ipfsConfig = JSON.stringify(this.kredits.ipfs.config);
- throw new Error(`IPFS node not available; config: ${ipfsConfig} - ${error.message}`);
+ throw new Error(`IPFS node not available; config: ${JSON.stringify(this.kredits.ipfs.config)} - ${error.message}`);
})
.then(() => {
let promises = Object.keys(this.kredits.contracts).map((name) => {
- let address = this.kredits.contracts[name].contract.address;
+ let contractWrapper = this.kredits.contracts[name];
return this.kredits.provider.getCode(contractWrapper.contract.address).then((code) => {
// TODO: I think this throws the error: Error: contract not deployed
// I guess we don't need that if check anymore...
return this.kredits.provider.getCode(address).then((code) => {
// not sure if we always get the same return value if the code is not available
// so checking if it is < 5 long
if (code === '0x00' || code.length < 5) {
- throw new Error(`Contract for: ${name} not found at ${address} on network ${this.kredits.networkId}`);
+ throw new Error(`Contract for: ${name} not found at ${contractWrapper.contract.address} on network ${this.kredits.provider.chainId}`);
}
return true;
});
});
return Promise.all(promises);
});
}


@@ -1,15 +0,0 @@
const tv4 = require('tv4');
const validator = tv4.freshApi();
validator.addFormat({
'date': function(value) {
const dateRegexp = /^[0-9]{4,}-[0-9]{2}-[0-9]{2}$/;
return dateRegexp.test(value) ? null : 'A valid ISO 8601 full-date string is expected';
},
'time': function(value) {
const timeRegexp = /^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(([Zz])|([+|-]([01][0-9]|2[0-3]):[0-5][0-9]))$/;
return timeRegexp.test(value) ? null : 'A valid ISO 8601 full-time string is expected';
},
});
module.exports = validator;
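A hedged sketch of how the validator above is used by the serializers in this compare (kosmos-schemas provides the JSON schemas; the document below is deliberately minimal and illustrative, so validator.error reports what the schema finds missing):

const schemas = require('kosmos-schemas');
const validator = require('./lib/utils/validator');

const document = {
  '@context': 'https://schema.kosmos.org',
  '@type': 'Contribution',
  date: '2019-04-11', // checked by the custom 'date' format above
  time: '01:31:47Z'   // checked by the custom 'time' format above
};

if (!validator.validate(document, schemas['contribution'])) {
  console.log(validator.error); // tv4 reports the first failing schema rule
}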

package-lock.json (generated, 1023 changes)

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "kredits-contracts",
- "version": "5.3.0",
+ "version": "4.0.2",
"description": "Ethereum contracts and npm wrapper for Kredits",
"main": "./lib/kredits.js",
"directories": {
@@ -20,9 +20,6 @@
"deploy:apps": "./scripts/every-app.sh \"aragon apm publish major\"",
"devchain": "aragon devchain --port 7545",
"dao:address": "truffle exec scripts/current-address.js",
- "lint:contracts": "solhint \"contracts/**/*.sol\" \"apps/*/contracts/**/*.sol\"",
- "lint:contract-tests": "eslint apps/*/test",
- "lint:wrapper": "eslint lib/",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
@@ -40,21 +37,15 @@
"@aragon/kits-base": "^1.0.0",
"@aragon/os": "^4.1.0",
"async-each-series": "^1.1.0",
- "eslint": "^5.16.0",
- "eslint-plugin-import": "^2.16.0",
- "eslint-plugin-node": "^8.0.1",
- "eslint-plugin-promise": "^4.1.1",
"eth-provider": "^0.2.2",
"openzeppelin-solidity": "^2.2.0",
"promptly": "^3.0.3",
- "solc": "^0.4.25",
- "solhint": "^2.0.0"
+ "solc": "^0.4.25"
},
"dependencies": {
"ethers": "^4.0.27",
"ipfs-http-client": "^30.1.1",
- "kosmos-schemas": "^2.0.0",
- "tv4": "^1.3.0"
+ "rsvp": "^4.8.2"
},
"keywords": [
"kosmos",


@@ -1,5 +1,4 @@
const promptly = require('promptly');
- const { inspect } = require('util');
const initKredits = require('./helpers/init_kredits.js');
@@ -27,32 +26,23 @@ module.exports = async function(callback) {
console.log(`Creating a contribution for contributor account ${contributorAccount} ID: ${contributorId}`);
- [ dateNow, timeNow ] = (new Date()).toISOString().split('T');
let contributionAttributes = {
contributorId,
- date: dateNow,
- time: timeNow,
amount: await promptly.prompt('Amount: '),
description: await promptly.prompt('Description: '),
kind: await promptly.prompt('Kind: ', { default: 'dev' }),
url: await promptly.prompt('URL: ', { default: '' })
}
- const contributorData = await kredits.Contributor.getById(contributorId);
- contributionAttributes.contributorIpfsHash = contributorData.ipfsHash;
console.log("\nAdding contribution:");
console.log(contributionAttributes);
- kredits.Contribution.addContribution(contributionAttributes, { gasLimit: 300000 })
- .then(result => {
+ kredits.Contribution.addContribution(contributionAttributes, { gasLimit: 300000 }).then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
- })
- .catch(error => {
+ }).catch((error) => {
console.log('Failed to create contribution');
- callback(inspect(error));
+ callback(error);
});
}


@@ -26,7 +26,7 @@ module.exports = async function(callback) {
kind: await prompt('Kind (default person): ', {default: 'person'}),
url: await prompt('URL: '),
github_username: await prompt('GitHub username: '),
- github_uid: parseInt(await prompt('GitHub UID: ')),
+ github_uid: await prompt('GitHub UID: '),
wiki_username: await prompt('Wiki username: '),
};


@@ -1,5 +1,4 @@
const promptly = require('promptly');
- const { inspect } = require('util');
const initKredits = require('./helpers/init_kredits.js');
@@ -26,31 +25,23 @@ module.exports = async function(callback) {
}
console.log(`Creating a proposal for contributor ID #${contributorId} account: ${contributorAccount}`);
- [ dateNow, timeNow ] = (new Date()).toISOString().split('T');
let contributionAttributes = {
contributorId,
- date: dateNow,
- time: timeNow,
amount: await promptly.prompt('Amount: '),
description: await promptly.prompt('Description: '),
kind: await promptly.prompt('Kind: ', { default: 'dev' }),
url: await promptly.prompt('URL: ', { default: '' })
}
- const contributorData = await kredits.Contributor.getById(contributorId);
- contributionAttributes.contributorIpfsHash = contributorData.ipfsHash;
console.log("\nAdding proposal:");
console.log(contributionAttributes);
- kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 })
- .then((result) => {
+ kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 }).then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
}).catch((error) => {
console.log('Failed to create proposal');
- callback(inspect(error));
+ callback(error);
});
}

scripts/deploy-apm.js (new file, 363 lines)

@@ -0,0 +1,363 @@
const namehash = require('eth-ens-namehash').hash
const keccak256 = require('js-sha3').keccak_256
const deployENS = require('@aragon/os/scripts/deploy-test-ens')
const deployDaoFactory = require('@aragon/os/scripts/deploy-daofactory')
const logDeploy = require('@aragon/os/scripts//helpers/deploy-logger')
const getAccounts = require('@aragon/os/scripts//helpers/get-accounts')
const globalArtifacts = this.artifacts // Not injected unless called directly via truffle
const globalWeb3 = this.web3 // Not injected unless called directly via truffle
const ZERO_ADDR = '0x0000000000000000000000000000000000000000'
const defaultOwner = process.env.OWNER
const defaultDaoFactoryAddress = process.env.DAO_FACTORY
const defaultENSAddress = process.env.ENS
module.exports = async (
truffleExecCallback,
{
artifacts = globalArtifacts,
web3 = globalWeb3,
ensAddress = defaultENSAddress,
owner = defaultOwner,
daoFactoryAddress = defaultDaoFactoryAddress,
verbose = true
} = {}
) => {
const log = (...args) => {
if (verbose) { console.log(...args) }
}
const APMRegistry = artifacts.require('APMRegistry')
const Repo = artifacts.require('Repo')
const ENSSubdomainRegistrar = artifacts.require('ENSSubdomainRegistrar')
const DAOFactory = artifacts.require('DAOFactory')
const APMRegistryFactory = artifacts.require('APMRegistryFactory')
const ENS = artifacts.require('ENS')
const Kernel = artifacts.require('Kernel')
const ACL = artifacts.require('ACL')
const tldName = 'eth'
const labelName = 'aragonpm'
const tldHash = namehash(tldName)
const labelHash = '0x'+keccak256(labelName)
const apmNode = namehash(`${labelName}.${tldName}`)
let ens
log('Deploying APM...')
const accounts = await getAccounts(web3)
if (!owner) {
owner = accounts[0]
log('OWNER env variable not found, setting APM owner to the provider\'s first account')
}
log('Owner:', owner)
if (!ensAddress) {
log('=========')
log('Missing ENS! Deploying a custom ENS...')
ens = (await deployENS(null, { artifacts, owner, verbose: false })).ens
ensAddress = ens.address
} else {
ens = ENS.at(ensAddress)
}
log('ENS:', ensAddress)
log(`TLD: ${tldName} (${tldHash})`)
log(`Label: ${labelName} (${labelHash})`)
log(`apmNode: ${apmNode}`)
log('=========')
log('Deploying APM bases...')
const apmRegistryBase = await APMRegistry.new()
await logDeploy(apmRegistryBase, { verbose })
const apmRepoBase = await Repo.new()
await logDeploy(apmRepoBase, { verbose })
const ensSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
await logDeploy(ensSubdomainRegistrarBase, { verbose })
let daoFactory
if (daoFactoryAddress) {
daoFactory = DAOFactory.at(daoFactoryAddress)
const hasEVMScripts = await daoFactory.regFactory() !== ZERO_ADDR
log(`Using provided DAOFactory (with${hasEVMScripts ? '' : 'out' } EVMScripts):`, daoFactoryAddress)
} else {
log('Deploying DAOFactory with EVMScripts...')
daoFactory = (await deployDaoFactory(null, { artifacts, withEvmScriptRegistryFactory: true, verbose: false })).daoFactory
}
log('Deploying APMRegistryFactory...')
const apmFactory = await APMRegistryFactory.new(
daoFactory.address,
apmRegistryBase.address,
apmRepoBase.address,
ensSubdomainRegistrarBase.address,
ensAddress,
'0x00'
)
await logDeploy(apmFactory, { verbose })
log(`Assigning ENS name (${labelName}.${tldName}) to factory... ${apmFactory.address}`)
if (await ens.owner(apmNode) === accounts[0]) {
log('Transferring name ownership from deployer to APMRegistryFactory')
await ens.setOwner(apmNode, apmFactory.address)
} else {
log('Creating subdomain and assigning it to APMRegistryFactory')
try {
await ens.setSubnodeOwner(tldHash, labelHash, apmFactory.address)
//await ens.setSubnodeOwner(apmNode, keccak256('open'), apmFactory.address)
} catch (err) {
console.error(err);
console.error(
`Error: could not set the owner of '${labelName}.${tldName}' on the given ENS instance`,
`(${ensAddress}). Make sure you have ownership rights over the subdomain.`
)
throw err
}
}
log('Deploying APM...')
const receipt = await apmFactory.newAPM(tldHash, labelHash, owner)
log('=========')
const apmAddr = receipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
console.log(receipt.logs);
const apmDAO = APMRegistry.at(apmAddr);
log('Address:', apmAddr)
log('Transaction hash:', receipt.tx)
log('Deploying subdomain APM bases...')
const subApmRegistryBase = await APMRegistry.new()
await logDeploy(subApmRegistryBase, { verbose })
const subApmRepoBase = await Repo.new()
await logDeploy(subApmRepoBase, { verbose })
const subEnsSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
await logDeploy(subEnsSubdomainRegistrarBase, { verbose })
log('Deploying APMRegistryFactory...')
const subApmFactory = await APMRegistryFactory.new(
daoFactory.address,
subApmRegistryBase.address,
subApmRepoBase.address,
subEnsSubdomainRegistrarBase.address,
ensAddress,
'0x00'
)
await logDeploy(subApmFactory, { verbose })
const kernelAddr = await apmDAO.kernel();
console.log(kernelAddr);
const aclAddr = await Kernel.at(kernelAddr).acl();
const acl = ACL.at(aclAddr);
const role = await ensSubdomainRegistrarBase.CREATE_NAME_ROLE();
const registrarAddr = await apmDAO.registrar();
const registrar = ENSSubdomainRegistrar.at(registrarAddr);
console.log(owner, registrarAddr, role);
try {
await acl.grantPermission(owner, registrarAddr, role);
log('Deploying subdomain APM...')
console.log(await acl.hasPermission(owner, registrar.address, role))
console.log(subApmFactory.address)
await registrar.createNameAndPoint(namehash('open'), subApmFactory.address)
const subReceipt = await subApmFactory.newAPM(namehash('aragonpm.eth'), namehash('open'), owner)
log('=========')
const subApmAddr = subReceipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
console.log(subReceipt.logs);
console.log(subApmAddr);
} catch(e) {
console.log(e);
}
if (typeof truffleExecCallback === 'function') {
// Called directly via `truffle exec`
truffleExecCallback()
} else {
return {
apmFactory,
ens,
apm: APMRegistry.at(apmAddr),
}
}
}
/*
"
const namehash = require('eth-ens-namehash').hash
const keccak256 = require('js-sha3').keccak_256
const deployENS = require('@aragon/os/scripts/deploy-test-ens')
const deployDaoFactory = require('@aragon/os/scripts/deploy-daofactory')
const logDeploy = require('@aragon/os/scripts//helpers/deploy-logger')
const getAccounts = require('@aragon/os/scripts//helpers/get-accounts')
const globalArtifacts = this.artifacts // Not injected unless called directly via truffle
const globalWeb3 = this.web3 // Not injected unless called directly via truffle
const ZERO_ADDR = '0x0000000000000000000000000000000000000000'
const defaultOwner = process.env.OWNER
const defaultDaoFactoryAddress = process.env.DAO_FACTORY
const defaultENSAddress = process.env.ENS
module.exports = async (
truffleExecCallback,
{
artifacts = globalArtifacts,
web3 = globalWeb3,
ensAddress = defaultENSAddress,
owner = defaultOwner,
daoFactoryAddress = defaultDaoFactoryAddress,
verbose = true
} = {}
) => {
const log = (...args) => {
if (verbose) { console.log(...args) }
}
const APMRegistry = artifacts.require('APMRegistry')
const Repo = artifacts.require('Repo')
const ENSSubdomainRegistrar = artifacts.require('ENSSubdomainRegistrar')
const DAOFactory = artifacts.require('DAOFactory')
const APMRegistryFactory = artifacts.require('APMRegistryFactory')
const ENS = artifacts.require('ENS')
const tldName = 'eth'
const labelName = 'open.aragonpm'
const tldHash = namehash(tldName)
const labelHash = '0x'+keccak256(labelName)
const apmNode = namehash(`${labelName}.${tldName}`)
let ens
log('Deploying APM...')
const accounts = await getAccounts(web3)
if (!owner) {
owner = accounts[0]
log('OWNER env variable not found, setting APM owner to the provider\'s first account')
}
log('Owner:', owner)
if (!ensAddress) {
log('=========')
log('Missing ENS! Deploying a custom ENS...')
ens = (await deployENS(null, { artifacts, owner, verbose: false })).ens
ensAddress = ens.address
} else {
ens = ENS.at(ensAddress)
}
log('ENS:', ensAddress)
log(`TLD: ${tldName} (${tldHash})`)
log(`Label: ${labelName} (${labelHash})`)
log('=========')
log('Deploying APM bases...')
const apmRegistryBase = await APMRegistry.new()
await logDeploy(apmRegistryBase, { verbose })
const apmRepoBase = await Repo.new()
await logDeploy(apmRepoBase, { verbose })
const ensSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
await logDeploy(ensSubdomainRegistrarBase, { verbose })
let daoFactory
if (daoFactoryAddress) {
daoFactory = DAOFactory.at(daoFactoryAddress)
const hasEVMScripts = await daoFactory.regFactory() !== ZERO_ADDR
log(`Using provided DAOFactory (with${hasEVMScripts ? '' : 'out' } EVMScripts):`, daoFactoryAddress)
} else {
log('Deploying DAOFactory with EVMScripts...')
daoFactory = (await deployDaoFactory(null, { artifacts, withEvmScriptRegistryFactory: true, verbose: false })).daoFactory
}
log('Deploying APMRegistryFactory...')
const apmFactory = await APMRegistryFactory.new(
daoFactory.address,
apmRegistryBase.address,
apmRepoBase.address,
ensSubdomainRegistrarBase.address,
ensAddress,
'0x00'
)
await logDeploy(apmFactory, { verbose })
log(`Assigning ENS name (${labelName}.${tldName}) to factory...`)
if (await ens.owner(apmNode) === accounts[0]) {
log('Transferring name ownership from deployer to APMRegistryFactory')
await ens.setOwner(apmNode, apmFactory.address)
} else {
log('Creating subdomain and assigning it to APMRegistryFactory')
try {
await ens.setSubnodeOwner(tldHash, labelHash, apmFactory.address)
} catch (err) {
console.error(
`Error: could not set the owner of '${labelName}.${tldName}' on the given ENS instance`,
`(${ensAddress}). Make sure you have ownership rights over the subdomain.`
)
throw err
}
}
log('Deploying APM...')
const receipt = await apmFactory.newAPM(tldHash, labelHash, owner)
log('=========')
const apmAddr = receipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
log('# APM:')
log('Address:', apmAddr)
log('Transaction hash:', receipt.tx)
log('=========')
try {
const kernel = await ensSubdomainRegistrarBase.kernel();
const acl = await kernel.acl();
console.log(acl);
let ret = await ensSubdomainRegistrarBase.createNameAndPoint(labelHash, apmAddr).then(console.log).catch(console.log);
console.log(ret);
} catch(e) {
console.log(e);
}
if (typeof truffleExecCallback === 'function') {
// Called directly via `truffle exec`
truffleExecCallback()
} else {
return {
apmFactory,
ens,
apm: APMRegistry.at(apmAddr),
}
}
}
*/


@@ -15,14 +15,13 @@ module.exports = async function(callback) {
console.log(`Using Contribution at: ${kredits.Contribution.contract.address}`);
const table = new Table({
- head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?', 'IPFS']
+ head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?']
})
try {
let blockNumber = await kredits.provider.getBlockNumber();
let contributions = await kredits.Contribution.all();
- console.log(`Current block number: ${blockNumber}`);
contributions.forEach((c) => {
const confirmed = c.confirmedAtBlock <= blockNumber;
@@ -31,17 +30,13 @@ module.exports = async function(callback) {
c.contributorId,
`${c.description}`,
c.amount.toString(),
- `${confirmed} (${c.confirmedAtBlock})`,
+ confirmed,
c.vetoed,
c.claimed,
- c.ipfsHash
])
});
console.log(table.toString());
- let totalKreditsEarned = await kredits.Contribution.functions.totalKreditsEarned(true);
- console.log(`Total confirmed kredits: ${totalKreditsEarned}`);
} catch (err) {
console.log(err);
}


@@ -15,31 +15,23 @@ module.exports = async function(callback) {
console.log(`Using Contributor at: ${kredits.Contributor.contract.address}`);
const table = new Table({
- head: ['ID', 'Account', 'Name', 'Core?', 'Balance', 'Kredits earned', 'Contributions count', 'IPFS']
+ head: ['ID', 'Account', 'Core?', 'Name', 'Balance']
})
- try {
- const contributors = await kredits.Contributor.all()
+ let contributors = await kredits.Contributor.all()
contributors.forEach((c) => {
table.push([
c.id.toString(),
c.account,
- `${c.name}`,
c.isCore,
- c.balanceInt.toString(),
- c.totalKreditsEarned.toString(),
- c.contributionsCount.toString(),
- c.ipfsHash
+ `${c.name}`,
+ ethers.utils.formatEther(c.balance)
])
})
console.log(table.toString())
- } catch(e) {
- callback(e);
- return;
- }
callback()
}

yarn.lock (8836 changes)

File diff suppressed because it is too large