Compare commits

..

2 Commits

Author SHA1 Message Date
3d0abe9027 hack hack apm 2019-04-11 01:31:47 +02:00
4fb8f1dc86 hack hack apm 2019-04-11 01:30:05 +02:00
52 changed files with 735 additions and 43043 deletions

View File

@@ -1,32 +0,0 @@
module.exports = {
'env': {
'browser': true,
'es6': true,
'node': true
},
'extends': 'eslint:recommended',
'globals': {
'Atomics': 'readonly',
'SharedArrayBuffer': 'readonly'
},
'parserOptions': {
'ecmaVersion': 2018,
'sourceType': 'module'
},
'rules': {
'comma-dangle': ['error', {
arrays: 'always-multiline',
objects: 'always-multiline',
imports: 'never',
exports: 'never',
functions: 'ignore',
}],
'eol-last': ['error', 'always'],
semi: ['error', 'always'],
'space-before-function-paren': ['error', {
anonymous: 'never',
named: 'always',
asyncArrow: 'always',
}],
}
}

2
.gitignore vendored
View File

@@ -4,5 +4,3 @@ node_modules
**/node_modules
.ganache-db
.tm_properties
yarn-error.log
.DS_Store

View File

@@ -1,9 +0,0 @@
{
"extends": [
"solhint:default",
"solhint:recommended"
],
"rules": {
"indent": "2"
}
}

View File

@@ -1,26 +0,0 @@
---
language: node_js
node_js:
- "11"
sudo: false
dist: xenial
cache:
yarn: true
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
- export PATH=$HOME/.yarn/bin:$PATH
install:
- yarn install --no-lockfile --non-interactive
script:
- yarn lint:wrapper
- yarn lint:contract-tests
# - yarn lint:contracts
branches:
only:
- master

View File

@@ -36,7 +36,7 @@ development ganache.
$ npm run devchain (or aragon devchain --port 7545)
To clear/reset the chain use (e.g. if you run out of funds on your devchain)
To clear/reset the chain use:
$ npm run devchain -- --reset (or aragon devchain --port 7545 --reset)
@@ -79,9 +79,9 @@ Kredits DAO independently.
![](docs/kredits-diagram.png)
A DAO can be deployed using the `scripts/deploy-kit.js` script or with the
`npm run deploy:dao` command. This deploys a new Kredits DAO, installs the
latest app versions and sets the required permissions.
A DAO can be deployed using the `scripts/deploy-kit.js` script or with the `npm
run deploy:dao` command. This deploys a new Kredits DAO, installs the latest
app versions and sets the required permissions.
See each app in `/apps/*` for details.
@@ -115,11 +115,11 @@ Script to add a new entries to the contracts using the JS wrapper
$ truffle exec scripts/add-{contributor, contribution, proposal}.js
### list-{contributors, contributions, proposals}.js
### list-{contributor, contribution, proposal}.js
List contract entries
$ truffle exec scripts/list-{contributors, contributions, proposals}.js
$ truffle exec scripts/list-{contributor, contribution, proposal}.js
### send-funds.js
@@ -152,7 +152,7 @@ Deploys a new KreditsKit that allows to create a new DAO
or
$ npm run deploy:kit
`ENS` address is required as environment variable.
`ENS` address is required as environment variable.
`DAO_FACTORY` can optionally be set as environment variable. (see aragon)
### new-dao.js

View File

@@ -1,12 +0,0 @@
module.exports = {
'globals': {
contract: true,
describe: true,
it: true,
},
rules: {
'no-unused-vars': ['error', {
'argsIgnorePattern': '^_',
}],
}
}

View File

@@ -118,26 +118,6 @@ contract Contribution is AragonApp {
// Custom functions
//
function totalKreditsEarned(bool confirmedOnly) public view returns (uint256 count) {
for (uint32 i = 1; i <= contributionsCount; i++) {
ContributionData memory c = contributions[i];
if (block.number >= c.confirmedAtBlock || !confirmedOnly) {
count += c.amount; // should use safemath
}
}
}
function totalKreditsEarnedByContributor(uint32 contributorId, bool confirmedOnly) public view returns (uint256 count) {
uint256 tokenBalance = ownedContributions[contributorId].length;
for (uint256 i = 0; i < tokenBalance; i++) {
uint32 cId = ownedContributions[contributorId][i];
ContributionData memory c = contributions[cId];
if (block.number >= c.confirmedAtBlock || !confirmedOnly) {
count += c.amount; // should use safemath
}
}
}
function getContribution(uint32 contributionId) public view returns (uint32 id, uint32 contributorId, uint32 amount, bool claimed, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, uint256 confirmedAtBlock, bool exists, bool vetoed) {
id = contributionId;
ContributionData storage c = contributions[id];

View File

@@ -11,10 +11,10 @@
"start": "npm run start:aragon:ipfs",
"start:aragon:ipfs": "aragon run",
"start:aragon:http": "aragon run --http localhost:8001 --http-served-from ./dist",
"start:app": "",
"start:app": "npm run sync-assets && npm run build:script -- --no-minify && parcel serve app/index.html -p 8001 --out-dir dist/ --no-cache",
"test": "aragon contracts test",
"compile": "aragon contracts compile",
"sync-assets": "",
"sync-assets": "copy-aragon-ui-assets -n aragon-ui ./dist",
"build:app": "",
"build:script": "",
"build": "",

View File

@@ -1,5 +1,5 @@
// const Contribution = artifacts.require('Contribution.sol');
const CounterApp = artifacts.require('Contribution.sol')
contract('Contribution', (_accounts) => {
it('should be tested');
});
contract('Contribution', (accounts) => {
it('should be tested')
})

File diff suppressed because it is too large Load Diff

View File

@@ -6,10 +6,6 @@ import "@aragon/os/contracts/kernel/IKernel.sol";
interface ITokenBalance {
function balanceOf(address contributorAccount) public view returns (uint256);
}
interface IContributionBalance {
function totalKreditsEarnedByContributor(uint32 contributorId, bool confirmedOnly) public view returns (uint256 count);
function balanceOf(address owner) public view returns (uint256);
}
contract Contributor is AragonApp {
bytes32 public constant KERNEL_APP_ADDR_NAMESPACE = 0xd6f028ca0e8edb4a8c9757ca4fdccab25fa1e0317da1188108f7d2dee14902fb;
@@ -47,12 +43,6 @@ contract Contributor is AragonApp {
return k.getApp(KERNEL_APP_ADDR_NAMESPACE, appIds[uint8(Apps.Token)]);
}
function getContributionContract() public view returns (address) {
IKernel k = IKernel(kernel());
return k.getApp(KERNEL_APP_ADDR_NAMESPACE, appIds[uint8(Apps.Contribution)]);
}
function coreContributorsCount() view public returns (uint32) {
uint32 count = 0;
for (uint32 i = 1; i <= contributorsCount; i++) {
@@ -128,7 +118,7 @@ contract Contributor is AragonApp {
return contributors[id];
}
function getContributorById(uint32 _id) public view returns (uint32 id, address account, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, bool isCore, uint256 balance, uint256 totalKreditsEarned, uint256 contributionsCount, bool exists ) {
function getContributorById(uint32 _id) public view returns (uint32 id, address account, bytes32 hashDigest, uint8 hashFunction, uint8 hashSize, bool isCore, uint256 balance, bool exists ) {
id = _id;
Contributor storage c = contributors[_id];
account = c.account;
@@ -138,9 +128,6 @@ contract Contributor is AragonApp {
isCore = isCoreTeam(id);
address token = getTokenContract();
balance = ITokenBalance(token).balanceOf(c.account);
address contribution = getContributionContract();
totalKreditsEarned = IContributionBalance(contribution).totalKreditsEarnedByContributor(_id, true);
contributionsCount = IContributionBalance(contribution).balanceOf(c.account);
exists = c.exists;
}

View File

@@ -1,5 +1,5 @@
// const Contributor = artifacts.require('Contributor.sol');
const CounterApp = artifacts.require('CounterApp.sol')
contract('Contributor', (_accounts) => {
it('should be tested');
});
contract('CounterApp', (accounts) => {
it('should be tested')
})

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
{
"name": "Proposal",
"description": "Kredits Proposal app"
"description": "Kredits proposal app"
}

View File

@@ -1,5 +0,0 @@
// const Proposal = artifacts.require('Proposal.sol');
contract('Proposal', (_accounts) => {
it('should be tested');
});

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
{
"name": "Token",
"description": "Kredits Token app"
"description": "Kredits token app"
}

View File

@@ -1,5 +1,5 @@
// const Token = artifacts.require('Token.sol');
const CounterApp = artifacts.require('CounterApp.sol')
contract('Token', (_accounts) => {
it('should be tested');
});
contract('CounterApp', (accounts) => {
it('should be tested')
})

File diff suppressed because it is too large Load Diff

View File

@@ -1,48 +1,16 @@
const contractCalls = [
['Contributor', 'add', [{
account: '0x7e8f313c56f809188313aa274fa67ee58c31515d',
name: 'bumi',
kind: 'person',
url: '',
github_username: 'bumi',
github_uid: 318,
gitea_username: 'bumi',
wiki_username: 'Bumi'
}, { gasLimit: 200000 }]],
['Contributor', 'add', [{
account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc',
name: 'raucao',
kind: 'person',
url: '',
github_username: 'skddc',
github_uid: 842,
gitea_username: 'raucao',
wiki_username: 'Basti'
}, { gasLimit: 200000 }]],
['Contributor', 'add', [{
account: '0xF722709ECC3B05c19d02E82a2a4A4021B8F48C62',
name: 'Manuel',
kind: 'person',
url: '',
github_username: 'fsmanuel',
github_uid: 54812,
wiki_username: 'Manuel'
}, { gasLimit: 200000 }]],
['Proposal', 'addProposal', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-09', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-10', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 500, kind: 'dev', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
['Contributor', 'add', [{ account: '0x7e8f313c56f809188313aa274fa67ee58c31515d', name: 'bumi', isCore: true, kind: 'person', url: '', github_username: 'bumi', github_uid: 318, wiki_username: 'bumi' }, { gasLimit: 200000 }]],
['Contributor', 'add', [{ account: '0x49575f3DD9a0d60aE661BC992f72D837A77f05Bc', name: 'raucao', isCore: true, kind: 'person', url: '', github_username: 'skddc', github_uid: 842, wiki_username: 'raucau' }, { gasLimit: 200000 }]],
['Proposal', 'addProposal', [{ contributorId: 1, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Ran the seeds', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'addProposal', [{ contributorId: 2, amount: 500, kind: 'code', description: '[67P/kredits-contracts] Hacked on kredits', url: '' }, { gasLimit: 350000 }]],
['Proposal', 'vote', [1, { gasLimit: 550000 }]],
['Contribution', 'addContribution', [{ contributorId: 1, contributorIpfsHash: 'QmWKCYGr2rSf6abUPaTYqf98urvoZxGrb7dbspFZA6oyVF', date: '2019-04-11', amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 2, contributorIpfsHash: 'QmcHzEeAM26HV2zHTf5HnZrCtCtGdEccL5kUtDakAB7ozB', date: '2019-04-11', amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 1, amount: 5000, kind: 'dev', description: '[67P/kredits-contracts] Introduce contribution token', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'addContribution', [{ contributorId: 2, amount: 1500, kind: 'dev', description: '[67P/kredits-web] Reviewed stuff', url: '' }, { gasLimit: 350000 }]],
['Contribution', 'claim', [1, { gasLimit: 300000 }]]
];
const funds = [
'0x7e8f313c56f809188313aa274fa67ee58c31515d',
'0xa502eb4021f3b9ab62f75b57a94e1cfbf81fd827'
];
module.exports = { contractCalls, funds };

View File

@@ -1,14 +1,5 @@
# Contribution deployments
aragon apm publish major --environment=rinkeby"
## 2019-04-24 update balances
✔ Successfully published kredits-contribution.open.aragonpm.eth v6.0.0:
Contract address: 0x2c083EEA83fd3a99C93759D97D0317A43261c758
Content (ipfs): QmULpSqz7BgTFmDu8AL7YZZEz525xkcEzf3dPKtbRdUtFs
Transaction hash: 0x8b01c4c00162e918659d267a2beaf33b578e2aaf9f427f1aa9a43029333c5cd7
## 2019-04-10 - Weltempfänger release
✔ Successfully published kredits-contribution.open.aragonpm.eth v5.0.0:

View File

@@ -1,15 +1,5 @@
# Contributor deployments
aragon apm publish major --environment=rinkeby
## 2019-04-29 update balances
✔ Successfully published kredits-contributor.open.aragonpm.eth v5.0.0:
Contract address: 0xadefa3b66b68a127Fe38bEa1813b844EE69CFD86
Content (ipfs): QmeygbQgoj2McLWzo9hJayLWuBZqFaK4HTpa5qLeQdkn5K
Transaction hash: 0x4237a9636f6e4a8190e0d5bcfa85a452da097bf654a173a88e0e1de3d078f08d
## 2019-04-10 - Weltempfänger release
✔ Successfully published kredits-contributor.open.aragonpm.eth v4.0.0:

View File

@@ -1,19 +1,5 @@
# Kredits deployment
## 2019-04-24 upgrade contributor and contribution
aragon dao upgrade 0xc34edf7d11b7f8433d597f0bb0697acdff55ef14 kredits-contributor.open.aragonpm.eth --environment=rinkeby
eth-provider | Invalid provider preset/location: "local"
✔ Fetching kredits-contributor.open.aragonpm.eth@latest
✔ Upgrading app
✔ Successfully executed: "Set the resolving address of 'kredits-contributor.open.aragonpm.eth' in namespace 'App code' to 0xadefa3b66b68a127Fe38bEa1813b844EE69CFD86"
aragon dao upgrade 0xc34edf7d11b7f8433d597f0bb0697acdff55ef14 kredits-contribution.open.aragonpm.eth --environment=rinkeby
✔ Fetching kredits-contribution.open.aragonpm.eth@latest
✔ Upgrading app
✔ Successfully executed: "Set the resolving address of 'kredits-contribution.open.aragonpm.eth' in namespace 'App code' to 0x2c083EEA83fd3a99C93759D97D0317A43261c758"
## 2019-04-10 - Weltempfänger release
Using KreditsKit at: 0x76e069b47b79442657eaf0555a32c6b16fa1b8b4

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -2,15 +2,11 @@ const Base = require('./base');
const EthersUtils = require('ethers').utils;
class Acl extends Base {
hasPermission (fromAddress, contractAddress, roleID, params = null) {
let roleHash = EthersUtils.keccak256(EthersUtils.toUtf8Bytes(roleID));
return this.functions.hasPermission(
fromAddress,
contractAddress,
roleHash,
params
);
hasPermission(fromAddress, contractAddress, roleID, params = null) {
let roleHash = EthersUtils.keccak256(EthersUtils.toUtf8Bytes(roleID));
console.log(roleHash)
return this.functions.hasPermission(fromAddress, contractAddress, roleHash, params);
}
}

View File

@@ -1,24 +1,23 @@
class Base {
constructor (contract) {
constructor(contract) {
this.contract = contract;
}
get functions () {
get functions() {
return this.contract.functions;
}
get ipfs () {
get ipfs() {
if (!this._ipfsAPI) { throw new Error('IPFS API not configured; please set an ipfs instance'); }
return this._ipfsAPI;
}
set ipfs (ipfsAPI) {
set ipfs(ipfsAPI) {
this._ipfsAPI = ipfsAPI;
}
on (type, callback) {
on(type, callback) {
return this.contract.on(type, callback);
}
}
module.exports = Base;

View File

@@ -1,24 +1,37 @@
const Record = require('./record');
const ContributionSerializer = require('../serializers/contribution');
const ethers = require('ethers');
class Contribution extends Record {
get count () {
return this.functions.contributionsCount();
const ContributionSerializer = require('../serializers/contribution');
const Base = require('./base');
class Contribution extends Base {
all() {
return this.functions.contributionsCount()
.then(async (count) => {
let contributions = [];
for (let id = 1; id <= count; id++) {
const contribution = await this.getById(id)
contributions.push(contribution);
}
return contributions;
});
}
getById (id) {
getById(id) {
return this.functions.getContribution(id)
.then(data => {
return this.ipfs.catAndMerge(data, ContributionSerializer.deserialize);
});
}
getByContributorId (contributorId) {
getByContributorId(contributorId) {
return this.functions.getContributorAddressById(contributorId)
.then(address => this.getByContributorAddress(address));
}
getByContributorAddress (address) {
getByContributorAddress(address) {
return this.functions.balanceOf(address)
.then(async (balance) => {
const count = balance.toNumber();
@@ -34,17 +47,13 @@ class Contribution extends Record {
});
}
async addContribution (contributionAttr, callOptions = {}) {
const contribution = new ContributionSerializer(contributionAttr);
try { await contribution.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = contribution.serialize();
addContribution(contributionAttr, callOptions = {}) {
let json = ContributionSerializer.serialize(contributionAttr);
// TODO: validate against schema
return this.ipfs
.add(jsonStr)
.then(ipfsHashAttr => {
.add(json)
.then((ipfsHashAttr) => {
let contribution = [
contributionAttr.amount,
contributionAttr.contributorId,

View File

@@ -1,29 +1,40 @@
const Record = require('./record');
const ContributorSerializer = require('../serializers/contributor');
const formatKredits = require('../utils/format-kredits');
const ethers = require('ethers');
const RSVP = require('rsvp');
class Contributor extends Record {
get count () {
return this.functions.contributorsCount();
const ContributorSerializer = require('../serializers/contributor');
const Base = require('./base');
class Contributor extends Base {
all() {
return this.functions.contributorsCount()
.then(count => {
let contributors = [];
for (let id = 1; id <= count; id++) {
contributors.push(this.getById(id));
}
return RSVP.all(contributors);
});
}
getById (id) {
getById(id) {
return this.functions.getContributorById(id)
.then(data => {
data.balanceInt = formatKredits(data.balance);
// Fetch IPFS data if available
.then((data) => {
return this.ipfs.catAndMerge(data, ContributorSerializer.deserialize);
});
}
filterByAccount (search) {
filterByAccount(search) {
return this._byAccount(search, 'filter');
}
findByAccount (search) {
findByAccount(search) {
return this._byAccount(search, 'find');
}
_byAccount (search, method = 'filter') {
_byAccount(search, method = 'filter') {
return this.all().then((contributors) => {
const searchEntries = Object.entries(search);
@@ -39,16 +50,12 @@ class Contributor extends Record {
});
}
async add (contributorAttr, callOptions = {}) {
let contributor = new ContributorSerializer(contributorAttr);
try { await contributor.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = contributor.serialize();
add(contributorAttr, callOptions = {}) {
let json = ContributorSerializer.serialize(contributorAttr);
// TODO: validate against schema
return this.ipfs
.add(jsonStr)
.add(json)
.then((ipfsHashAttr) => {
let contributor = [
contributorAttr.account,
@@ -60,30 +67,6 @@ class Contributor extends Record {
return this.functions.addContributor(...contributor, callOptions);
});
}
updateProfile (contributorId, updateAttr, callOptions = {}) {
return this.getById(contributorId).then(async (contributor) => {
let updatedContributorAttr = Object.assign(contributor, updateAttr);
let updatedContributor = new ContributorSerializer(updatedContributorAttr);
try { await updatedContributor.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = updatedContributor.serialize();
return this.ipfs
.add(jsonStr)
.then(ipfsHashAttr => {
return this.functions.updateContributorProfileHash(
contributorId,
ipfsHashAttr.hashDigest,
ipfsHashAttr.hashFunction,
ipfsHashAttr.hashSize,
callOptions
);
});
});
}
}
module.exports = Contributor;

View File

@@ -4,5 +4,5 @@ module.exports = {
Proposal: require('./proposal'),
Token: require('./token'),
Kernel: require('./kernel'),
Acl: require('./acl'),
Acl: require('./acl')
};

View File

@@ -4,19 +4,19 @@ const Base = require('./base');
const KERNEL_APP_ADDR_NAMESPACE = '0xd6f028ca0e8edb4a8c9757ca4fdccab25fa1e0317da1188108f7d2dee14902fb';
class Kernel extends Base {
constructor (contract) {
constructor(contract) {
super(contract);
this.apm = 'aragonpm.eth'; // can be overwritten if needed
}
getApp (appName) {
getApp(appName) {
if (appName === 'Acl') {
return this.functions.acl();
}
return this.functions.getApp(KERNEL_APP_ADDR_NAMESPACE, this.appNamehash(appName));
}
appNamehash (appName) {
appNamehash(appName) {
return namehash(`kredits-${appName.toLowerCase()}.${this.apm}`);
}
}

View File

@@ -1,28 +1,36 @@
const Record = require('./record');
const ContributionSerializer = require('../serializers/contribution');
const ethers = require('ethers');
const RSVP = require('rsvp');
class Proposal extends Record {
get count () {
return this.functions.proposalsCount();
const ContributionSerializer = require('../serializers/contribution');
const Base = require('./base');
class Proposal extends Base {
all() {
return this.functions.proposalsCount()
.then(count => {
let proposals = [];
for (let id = 1; id <= count; id++) {
proposals.push(this.getById(id));
}
return RSVP.all(proposals);
});
}
getById (id) {
getById(id) {
return this.functions.getProposal(id)
.then(data => {
return this.ipfs.catAndMerge(data, ContributionSerializer.deserialize);
});
}
async addProposal (proposalAttr, callOptions = {}) {
const contribution = new ContributionSerializer(proposalAttr);
try { await contribution.validate(); }
catch (error) { return Promise.reject(error); }
const jsonStr = contribution.serialize();
addProposal(proposalAttr, callOptions = {}) {
let json = ContributionSerializer.serialize(proposalAttr);
// TODO: validate against schema
return this.ipfs
.add(jsonStr)
.add(json)
.then((ipfsHashAttr) => {
let proposal = [
proposalAttr.contributorId,
@@ -37,4 +45,4 @@ class Proposal extends Record {
}
}
module.exports = Proposal;
module.exports = Proposal

View File

@@ -1,14 +0,0 @@
const Base = require('./base');
const paged = require('../utils/pagination');
class Record extends Base {
all (options = {}) {
return this.count
.then((count) => {
let records = paged(count, options).map((id) => this.getById(id));
return Promise.all(records);
});
}
}
module.exports = Record;

View File

@@ -4,3 +4,4 @@ class Token extends Base {
}
module.exports = Token;

View File

@@ -1,7 +1,7 @@
const ethers = require('ethers');
const RSVP = require('rsvp');
const Preflight = require('./utils/preflight');
const deprecate = require('./utils/deprecate');
const ABIS = {
Contributor: require('./abis/Contributor.json'),
@@ -9,29 +9,29 @@ const ABIS = {
Token: require('./abis/Token.json'),
Proposal: require('./abis/Proposal.json'),
Kernel: require('./abis/Kernel.json'),
Acl: require('./abis/ACL.json'),
Acl: require('./abis/ACL.json')
};
const APP_CONTRACTS = [
'Contributor',
'Contribution',
'Token',
'Proposal',
'Acl',
'Acl'
];
const DaoAddresses = require('./addresses/dao.json');
const Contracts = require('./contracts');
const IPFS = require('./utils/ipfs');
const IPFS = require('./utils/ipfs')
// Helpers
function capitalize (word) {
function capitalize(word) {
let [first, ...rest] = word;
return `${first.toUpperCase()}${rest.join('')}`;
}
class Kredits {
constructor (provider, signer, options = {}) {
constructor(provider, signer, options = {}) {
let { addresses, abis, ipfsConfig } = options;
this.provider = provider;
@@ -41,35 +41,32 @@ class Kredits {
this.abis = abis || ABIS;
this.ipfs = new IPFS(ipfsConfig);
this.contracts = {};
this.networkId = null;
}
init (names) {
init(names) {
let contractsToLoad = names || APP_CONTRACTS;
return this.provider.getNetwork().then(network => {
this.networkId = network.chainId.toString();
this.addresses['Kernel'] = this.addresses['Kernel'] || DaoAddresses[this.networkId];
this.addresses['Kernel'] = this.addresses['Kernel'] || DaoAddresses[network.chainId.toString()];
let addressPromises = contractsToLoad.map((contractName) => {
return this.Kernel.getApp(contractName).then((address) => {
this.addresses[contractName] = address;
}).catch((error) => {
console.log(error);
throw new Error(`Failed to get address for ${contractName} from DAO at ${this.Kernel.contract.address}
- ${error.message}`
);
});
});
return Promise.all(addressPromises).then(() => { return this; });
return RSVP.all(addressPromises).then(() => { return this });
});
}
static setup (provider, signer, ipfsConfig = null) {
deprecate('Kredits.setup() is deprecated use new Kredits().init() instead');
static setup(provider, signer, ipfsConfig = null) {
console.log('Kredits.setup() is deprecated use new Kredits().init() instead');
return new Kredits(provider, signer, { ipfsConfig: ipfsConfig }).init();
}
get Kernel () {
get Kernel() {
let k = this.contractFor('Kernel');
// in case we want to use a special apm (e.g. development vs. production)
if (this.options.apm) {
@@ -78,37 +75,37 @@ class Kredits {
return k;
}
get Contributor () {
get Contributor() {
return this.contractFor('Contributor');
}
get Contributors () {
deprecate('Contributors is deprecated use Contributor instead');
get Contributors() {
console.log('Contributors is deprecated use Contributor instead');
return this.Contributor;
}
get Proposal () {
get Proposal() {
return this.contractFor('Proposal');
}
get Operator () {
get Operator() {
return this.Proposal;
}
get Token () {
get Token() {
return this.contractFor('Token');
}
get Contribution () {
get Contribution() {
return this.contractFor('Contribution');
}
get Acl () {
get Acl() {
return this.contractFor('Acl');
}
// Should be private
contractFor (name) {
contractFor(name) {
if (this.contracts[name]) {
return this.contracts[name];
}
@@ -128,7 +125,7 @@ class Kredits {
return this.contracts[name];
}
preflightChecks () {
preflightChecks() {
return new Preflight(this).check();
}
}

View File

@@ -1,75 +1,19 @@
const schemas = require('kosmos-schemas');
const validator = require('../utils/validator');
/**
* Serialization and validation for JSON-LD document of the contribution.
* Handle serialization for JSON-LD object of the contribution, according to
* https://github.com/67P/kosmos-schemas/blob/master/schemas/contribution.json
*
* @class
* @public
*/
class Contribution {
constructor (attrs) {
Object.keys(attrs).forEach(a => this[a] = attrs[a]);
}
/**
* Serialize object to JSON
*
* @public
*/
serialize () {
/**
* Deserialize JSON to object
*
* @method
* @public
*/
static deserialize(serialized) {
let {
contributorIpfsHash,
date,
time,
kind,
description,
url,
details,
} = this;
let data = {
'@context': 'https://schema.kosmos.org',
'@type': 'Contribution',
'contributor': {
'ipfs': contributorIpfsHash,
},
date,
time,
kind,
description,
'details': details || {},
};
if (url) {
data['url'] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
/**
* Validate serialized data against schema
*
* @public
*/
validate () {
const serialized = JSON.parse(this.serialize());
const valid = validator.validate(serialized, schemas['contribution']);
return valid ? Promise.resolve() : Promise.reject(validator.error);
}
/**
* Deserialize JSON to object
*
* @public
*/
static deserialize (serialized) {
let {
date,
time,
kind,
description,
details,
@@ -77,8 +21,6 @@ class Contribution {
} = JSON.parse(serialized.toString('utf8'));
return {
date,
time,
kind,
description,
details,
@@ -87,6 +29,39 @@ class Contribution {
};
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
static serialize(deserialized) {
let {
contributorIpfsHash,
kind,
description,
url,
details
} = deserialized;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contribution",
"contributor": {
"ipfs": contributorIpfsHash
},
kind,
description,
"details": details || {}
};
if (url) {
data["url"] = url;
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
}
module.exports = Contribution;

View File

@@ -1,5 +1,3 @@
const schemas = require('kosmos-schemas');
const validator = require('../utils/validator');
/**
* Handle serialization for JSON-LD object of the contributor, according to
* https://github.com/67P/kosmos-schemas/blob/master/schemas/contributor.json
@@ -8,87 +6,13 @@ const validator = require('../utils/validator');
* @public
*/
class Contributor {
constructor (attrs) {
Object.keys(attrs).forEach(a => this[a] = attrs[a]);
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
serialize () {
let {
name,
kind,
url,
github_uid,
github_username,
gitea_username,
wiki_username,
} = this;
let data = {
'@context': 'https://schema.kosmos.org',
'@type': 'Contributor',
kind,
name,
'accounts': [],
};
if (url) {
data['url'] = url;
}
if (github_uid) {
data.accounts.push({
'site': 'github.com',
'uid': github_uid,
'username': github_username,
'url': `https://github.com/${github_username}`,
});
}
if (gitea_username) {
data.accounts.push({
'site': 'gitea.kosmos.org',
'username': gitea_username,
'url': `https://gitea.kosmos.org/${gitea_username}`,
});
}
if (wiki_username) {
data.accounts.push({
'site': 'wiki.kosmos.org',
'username': wiki_username,
'url': `https://wiki.kosmos.org/User:${wiki_username}`,
});
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
/**
* Validate serialized data against schema
*
* @public
*/
validate () {
const serialized = JSON.parse(this.serialize());
const valid = validator.validate(serialized, schemas['contributor']);
return valid ? Promise.resolve() : Promise.reject(validator.error);
}
/**
* Deserialize JSON to object
*
* @method
* @public
*/
static deserialize (serialized) {
static deserialize(serialized) {
let {
name,
kind,
@@ -96,17 +20,13 @@ class Contributor {
accounts,
} = JSON.parse(serialized.toString('utf8'));
let github_username, github_uid, gitea_username, wiki_username;
let github = accounts.find(a => a.site === 'github.com');
let gitea = accounts.find(a => a.site === 'gitea.kosmos.org');
let wiki = accounts.find(a => a.site === 'wiki.kosmos.org');
let github_username, github_uid, wiki_username;
let github = accounts.find((a) => a.site === 'github.com');
let wiki = accounts.find((a) => a.site === 'wiki.kosmos.org');
if (github) {
(({ username: github_username, uid: github_uid} = github));
}
if (gitea) {
(({ username: gitea_username } = gitea));
}
if (wiki) {
(({ username: wiki_username } = wiki));
}
@@ -118,12 +38,59 @@ class Contributor {
accounts,
github_uid,
github_username,
gitea_username,
wiki_username,
ipfsData: serialized,
};
}
/**
* Serialize object to JSON
*
* @method
* @public
*/
static serialize(deserialized) {
let {
name,
kind,
url,
github_uid,
github_username,
wiki_username,
} = deserialized;
let data = {
"@context": "https://schema.kosmos.org",
"@type": "Contributor",
kind,
name,
"accounts": []
};
if (url) {
data["url"] = url;
}
if (github_uid) {
data.accounts.push({
"site": "github.com",
"uid": github_uid,
"username": github_username,
"url": `https://github.com/${github_username}`
});
}
if (wiki_username) {
data.accounts.push({
"site": "wiki.kosmos.org",
"username": wiki_username,
"url": `https://wiki.kosmos.org/User:${wiki_username}`
});
}
// Write it pretty to ipfs
return JSON.stringify(data, null, 2);
}
}
module.exports = Contributor;

View File

@@ -1,5 +0,0 @@
/*eslint no-console: ["error", { allow: ["warn"] }] */
module.exports = function deprecate (msg) {
console.warn(msg);
};

View File

@@ -1,10 +0,0 @@
const ethersUtils = require('ethers').utils;
module.exports = function(value, options = {}) {
let etherValue = ethersUtils.formatEther(value);
if (options.asFloat) {
return parseFloat(etherValue);
} else {
return parseInt(etherValue);
}
};

View File

@@ -2,7 +2,8 @@ const ipfsClient = require('ipfs-http-client');
const multihashes = require('multihashes');
class IPFS {
constructor (config) {
constructor(config) {
if (!config) {
config = { host: 'localhost', port: '5001', protocol: 'http' };
}
@@ -10,15 +11,7 @@ class IPFS {
this._config = config;
}
get config () {
return this._config;
}
get peerId () {
return this._ipfsAPI.id();
}
catAndMerge (data, deserialize) {
catAndMerge(data, deserialize) {
// if no hash details are found simply return the data; nothing to merge
if (!data.hashSize || data.hashSize === 0) {
return data;
@@ -33,7 +26,7 @@ class IPFS {
});
}
add (data) {
add(data) {
return this._ipfsAPI
.add(ipfsClient.Buffer.from(data))
.then((res) => {
@@ -41,7 +34,7 @@ class IPFS {
});
}
cat (hashData) {
cat(hashData) {
let ipfsHash = hashData; // default - if it is a string
if (hashData.hasOwnProperty('hashSize')) {
ipfsHash = this.encodeHash(hashData);
@@ -49,20 +42,21 @@ class IPFS {
return this._ipfsAPI.cat(ipfsHash);
}
decodeHash (ipfsHash) {
decodeHash(ipfsHash) {
let multihash = multihashes.decode(multihashes.fromB58String(ipfsHash));
return {
hashDigest: '0x' + multihashes.toHexString(multihash.digest),
hashSize: multihash.length,
hashFunction: multihash.code,
ipfsHash: ipfsHash,
ipfsHash: ipfsHash
};
}
encodeHash (hashData) {
encodeHash(hashData) {
let digest = ipfsClient.Buffer.from(hashData.hashDigest.slice(2), 'hex');
return multihashes.encode(digest, hashData.hashFunction, hashData.hashSize);
}
}
module.exports = IPFS;

View File

@@ -1,46 +0,0 @@
/**
 * Clamp a requested page number into the valid range [1, numberOfPages].
 *
 * @param {number|string} number - requested page (1-based); missing or
 *   unparsable values fall back to the first page
 * @param {number} size - records per page
 * @param {number} recordCount - total number of records
 * @returns {number} a page number addressing an existing page
 */
function pageNumber (number, size, recordCount) {
  let numberOfPages = Math.ceil(recordCount / size);

  // Always parse with an explicit radix; NaN falls back to page 1.
  number = parseInt(number, 10) || 1;

  // Ensure page number is in range
  number = number < 1 ? 1 : number;
  number = number > numberOfPages ? numberOfPages : number;

  return number;
}
/**
 * Build the list of record IDs belonging to one page.
 *
 * @param {string} order - 'asc' for ascending IDs; anything else descending
 * @param {number} number - page number (1-based, already clamped)
 * @param {number} size - records per page
 * @param {number} recordCount - total number of records
 * @returns {number[]} the IDs for the requested page
 */
function buildIds (order, number, size, recordCount) {
  const offset = size * (number - 1);

  // Shrink the last page so we never produce IDs beyond recordCount.
  const overshoot = offset + size - recordCount;
  const length = overshoot > 0 ? size - overshoot : size;

  const ascending = order === 'asc';
  const first = ascending ? 1 + offset : recordCount - offset;

  return Array.from({ length }, (_, i) => (ascending ? first + i : first - i));
}
module.exports = function paged (recordCount, options = {}) {
let { order, page } = options;
order = order || 'desc';
page = page || {};
let size = parseInt(page.size) || 25;
let number = pageNumber(page.number, size, recordCount);
return buildIds(order, number, size, recordCount);
};

View File

@@ -1,31 +1,25 @@
class Preflight {
constructor (kredits) {
constructor(kredits) {
this.kredits = kredits;
}
check () {
return this.kredits.ipfs.peerId()
check() {
return this.kredits.ipfs._ipfsAPI.id()
.catch((error) => {
const ipfsConfig = JSON.stringify(this.kredits.ipfs.config);
throw new Error(`IPFS node not available; config: ${ipfsConfig} - ${error.message}`);
throw new Error(`IPFS node not available; config: ${JSON.stringify(this.kredits.ipfs.config)} - ${error.message}`);
})
.then(() => {
let promises = Object.keys(this.kredits.contracts).map((name) => {
let address = this.kredits.contracts[name].contract.address;
// TODO: I think this throws the error: Error: contract not deployed
// I guess we don't need that if check anymore...
return this.kredits.provider.getCode(address).then((code) => {
let contractWrapper = this.kredits.contracts[name];
return this.kredits.provider.getCode(contractWrapper.contract.address).then((code) => {
// not sure if we always get the same return value if the code is not available
// so checking if it is < 5 long
if (code === '0x00' || code.length < 5) {
throw new Error(`Contract for: ${name} not found at ${address} on network ${this.kredits.networkId}`);
throw new Error(`Contract for: ${name} not found at ${contractWrapper.contract.address} on network ${this.kredits.provider.chainId}`);
}
return true;
});
});
return Promise.all(promises);
});
}

View File

@@ -1,15 +0,0 @@
const tv4 = require('tv4');

// Use a fresh tv4 instance so the formats added below don't leak into the
// shared global validator.
const validator = tv4.freshApi();

// ISO 8601 full-date, e.g. "2019-04-11"
const DATE_REGEXP = /^[0-9]{4,}-[0-9]{2}-[0-9]{2}$/;
// ISO 8601 full-time with a "Z" or numeric offset, e.g. "23:59:59.5+01:00"
const TIME_REGEXP = /^([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(([Zz])|([+|-]([01][0-9]|2[0-3]):[0-5][0-9]))$/;

// tv4 format validators return null on success and an error string otherwise.
validator.addFormat({
  'date': function(value) {
    return DATE_REGEXP.test(value) ? null : 'A valid ISO 8601 full-date string is expected';
  },
  'time': function(value) {
    return TIME_REGEXP.test(value) ? null : 'A valid ISO 8601 full-time string is expected';
  },
});

module.exports = validator;

1023
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "kredits-contracts",
"version": "5.3.0",
"version": "4.0.2",
"description": "Ethereum contracts and npm wrapper for Kredits",
"main": "./lib/kredits.js",
"directories": {
@@ -20,9 +20,6 @@
"deploy:apps": "./scripts/every-app.sh \"aragon apm publish major\"",
"devchain": "aragon devchain --port 7545",
"dao:address": "truffle exec scripts/current-address.js",
"lint:contracts": "solhint \"contracts/**/*.sol\" \"apps/*/contracts/**/*.sol\"",
"lint:contract-tests": "eslint apps/*/test",
"lint:wrapper": "eslint lib/",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
@@ -40,21 +37,15 @@
"@aragon/kits-base": "^1.0.0",
"@aragon/os": "^4.1.0",
"async-each-series": "^1.1.0",
"eslint": "^5.16.0",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-node": "^8.0.1",
"eslint-plugin-promise": "^4.1.1",
"eth-provider": "^0.2.2",
"openzeppelin-solidity": "^2.2.0",
"promptly": "^3.0.3",
"solc": "^0.4.25",
"solhint": "^2.0.0"
"solc": "^0.4.25"
},
"dependencies": {
"ethers": "^4.0.27",
"ipfs-http-client": "^30.1.1",
"kosmos-schemas": "^2.0.0",
"tv4": "^1.3.0"
"rsvp": "^4.8.2"
},
"keywords": [
"kosmos",

View File

@@ -1,5 +1,4 @@
const promptly = require('promptly');
const { inspect } = require('util');
const initKredits = require('./helpers/init_kredits.js');
@@ -27,32 +26,23 @@ module.exports = async function(callback) {
console.log(`Creating a contribution for contributor account ${contributorAccount} ID: ${contributorId}`);
[ dateNow, timeNow ] = (new Date()).toISOString().split('T');
let contributionAttributes = {
contributorId,
date: dateNow,
time: timeNow,
amount: await promptly.prompt('Amount: '),
description: await promptly.prompt('Description: '),
kind: await promptly.prompt('Kind: ', { default: 'dev' }),
url: await promptly.prompt('URL: ', { default: '' })
}
const contributorData = await kredits.Contributor.getById(contributorId);
contributionAttributes.contributorIpfsHash = contributorData.ipfsHash;
console.log("\nAdding contribution:");
console.log(contributionAttributes);
kredits.Contribution.addContribution(contributionAttributes, { gasLimit: 300000 })
.then(result => {
console.log("\n\nResult:");
console.log(result);
callback();
})
.catch(error => {
console.log('Failed to create contribution');
callback(inspect(error));
});
kredits.Contribution.addContribution(contributionAttributes, { gasLimit: 300000 }).then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
}).catch((error) => {
console.log('Failed to create contribution');
callback(error);
});
}

View File

@@ -26,7 +26,7 @@ module.exports = async function(callback) {
kind: await prompt('Kind (default person): ', {default: 'person'}),
url: await prompt('URL: '),
github_username: await prompt('GitHub username: '),
github_uid: parseInt(await prompt('GitHub UID: ')),
github_uid: await prompt('GitHub UID: '),
wiki_username: await prompt('Wiki username: '),
};

View File

@@ -1,5 +1,4 @@
const promptly = require('promptly');
const { inspect } = require('util');
const initKredits = require('./helpers/init_kredits.js');
@@ -26,31 +25,23 @@ module.exports = async function(callback) {
}
console.log(`Creating a proposal for contributor ID #${contributorId} account: ${contributorAccount}`);
[ dateNow, timeNow ] = (new Date()).toISOString().split('T');
let contributionAttributes = {
contributorId,
date: dateNow,
time: timeNow,
amount: await promptly.prompt('Amount: '),
description: await promptly.prompt('Description: '),
kind: await promptly.prompt('Kind: ', { default: 'dev' }),
url: await promptly.prompt('URL: ', { default: '' })
}
const contributorData = await kredits.Contributor.getById(contributorId);
contributionAttributes.contributorIpfsHash = contributorData.ipfsHash;
console.log("\nAdding proposal:");
console.log(contributionAttributes);
kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 })
.then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
}).catch((error) => {
console.log('Failed to create proposal');
callback(inspect(error));
});
kredits.Proposal.addProposal(contributionAttributes, { gasLimit: 300000 }).then((result) => {
console.log("\n\nResult:");
console.log(result);
callback();
}).catch((error) => {
console.log('Failed to create proposal');
callback(error);
});
}

363
scripts/deploy-apm.js Normal file
View File

@@ -0,0 +1,363 @@
const namehash = require('eth-ens-namehash').hash
const keccak256 = require('js-sha3').keccak_256
const deployENS = require('@aragon/os/scripts/deploy-test-ens')
const deployDaoFactory = require('@aragon/os/scripts/deploy-daofactory')
const logDeploy = require('@aragon/os/scripts//helpers/deploy-logger')
const getAccounts = require('@aragon/os/scripts//helpers/get-accounts')
const globalArtifacts = this.artifacts // Not injected unless called directly via truffle
const globalWeb3 = this.web3 // Not injected unless called directly via truffle
const ZERO_ADDR = '0x0000000000000000000000000000000000000000'
const defaultOwner = process.env.OWNER
const defaultDaoFactoryAddress = process.env.DAO_FACTORY
const defaultENSAddress = process.env.ENS
// Deploys a full Aragon Package Manager (APM): base contracts, an
// APMRegistryFactory, the `aragonpm.eth` registry, and then attempts to
// create a second ("open") sub-APM underneath it.
//
// Callable directly via `truffle exec` (truffleExecCallback is then a
// function) or programmatically (then the deployed contracts are returned).
//
// @param {Function} truffleExecCallback - truffle exec completion callback
// @param {Object}   options - artifacts/web3 injection plus ENS, OWNER and
//   DAO_FACTORY overrides (defaulting to the env vars read above)
module.exports = async (
  truffleExecCallback,
  {
    artifacts = globalArtifacts,
    web3 = globalWeb3,
    ensAddress = defaultENSAddress,
    owner = defaultOwner,
    daoFactoryAddress = defaultDaoFactoryAddress,
    verbose = true
  } = {}
) => {
  // Verbose-gated logger used throughout the deployment.
  const log = (...args) => {
    if (verbose) { console.log(...args) }
  }
  // Truffle contract abstractions for everything we deploy or attach to.
  const APMRegistry = artifacts.require('APMRegistry')
  const Repo = artifacts.require('Repo')
  const ENSSubdomainRegistrar = artifacts.require('ENSSubdomainRegistrar')
  const DAOFactory = artifacts.require('DAOFactory')
  const APMRegistryFactory = artifacts.require('APMRegistryFactory')
  const ENS = artifacts.require('ENS')
  const Kernel = artifacts.require('Kernel')
  const ACL = artifacts.require('ACL')
  // ENS coordinates of the registry we are about to create: aragonpm.eth
  const tldName = 'eth'
  const labelName = 'aragonpm'
  const tldHash = namehash(tldName)
  const labelHash = '0x'+keccak256(labelName)
  const apmNode = namehash(`${labelName}.${tldName}`)
  let ens
  log('Deploying APM...')
  const accounts = await getAccounts(web3)
  if (!owner) {
    owner = accounts[0]
    log('OWNER env variable not found, setting APM owner to the provider\'s first account')
  }
  log('Owner:', owner)
  // Reuse a provided ENS deployment, or spin up a throwaway test ENS.
  if (!ensAddress) {
    log('=========')
    log('Missing ENS! Deploying a custom ENS...')
    ens = (await deployENS(null, { artifacts, owner, verbose: false })).ens
    ensAddress = ens.address
  } else {
    ens = ENS.at(ensAddress)
  }
  log('ENS:', ensAddress)
  log(`TLD: ${tldName} (${tldHash})`)
  log(`Label: ${labelName} (${labelHash})`)
  log(`apmNode: ${apmNode}`)
  log('=========')
  // Base (implementation) contracts the APMRegistryFactory will clone from.
  log('Deploying APM bases...')
  const apmRegistryBase = await APMRegistry.new()
  await logDeploy(apmRegistryBase, { verbose })
  const apmRepoBase = await Repo.new()
  await logDeploy(apmRepoBase, { verbose })
  const ensSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
  await logDeploy(ensSubdomainRegistrarBase, { verbose })
  // Reuse a provided DAOFactory or deploy one with EVMScript support.
  let daoFactory
  if (daoFactoryAddress) {
    daoFactory = DAOFactory.at(daoFactoryAddress)
    const hasEVMScripts = await daoFactory.regFactory() !== ZERO_ADDR
    log(`Using provided DAOFactory (with${hasEVMScripts ? '' : 'out' } EVMScripts):`, daoFactoryAddress)
  } else {
    log('Deploying DAOFactory with EVMScripts...')
    daoFactory = (await deployDaoFactory(null, { artifacts, withEvmScriptRegistryFactory: true, verbose: false })).daoFactory
  }
  log('Deploying APMRegistryFactory...')
  const apmFactory = await APMRegistryFactory.new(
    daoFactory.address,
    apmRegistryBase.address,
    apmRepoBase.address,
    ensSubdomainRegistrarBase.address,
    ensAddress,
    '0x00'
  )
  await logDeploy(apmFactory, { verbose })
  // The factory must own `aragonpm.eth` before newAPM() can be called:
  // either transfer the existing name or create the subnode for it.
  log(`Assigning ENS name (${labelName}.${tldName}) to factory... ${apmFactory.address}`)
  if (await ens.owner(apmNode) === accounts[0]) {
    log('Transferring name ownership from deployer to APMRegistryFactory')
    await ens.setOwner(apmNode, apmFactory.address)
  } else {
    log('Creating subdomain and assigning it to APMRegistryFactory')
    try {
      await ens.setSubnodeOwner(tldHash, labelHash, apmFactory.address)
      //await ens.setSubnodeOwner(apmNode, keccak256('open'), apmFactory.address)
    } catch (err) {
      console.error(err);
      console.error(
        `Error: could not set the owner of '${labelName}.${tldName}' on the given ENS instance`,
        `(${ensAddress}). Make sure you have ownership rights over the subdomain.`
      )
      throw err
    }
  }
  log('Deploying APM...')
  const receipt = await apmFactory.newAPM(tldHash, labelHash, owner)
  log('=========')
  // The APM address is reported via the factory's DeployAPM event.
  const apmAddr = receipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
  console.log(receipt.logs);
  const apmDAO = APMRegistry.at(apmAddr);
  log('Address:', apmAddr)
  log('Transaction hash:', receipt.tx)
  // Second round: fresh bases + factory for the `open.aragonpm.eth` sub-APM.
  log('Deploying subdomain APM bases...')
  const subApmRegistryBase = await APMRegistry.new()
  await logDeploy(subApmRegistryBase, { verbose })
  const subApmRepoBase = await Repo.new()
  await logDeploy(subApmRepoBase, { verbose })
  const subEnsSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
  await logDeploy(subEnsSubdomainRegistrarBase, { verbose })
  log('Deploying APMRegistryFactory...')
  const subApmFactory = await APMRegistryFactory.new(
    daoFactory.address,
    subApmRegistryBase.address,
    subApmRepoBase.address,
    subEnsSubdomainRegistrarBase.address,
    ensAddress,
    '0x00'
  )
  await logDeploy(subApmFactory, { verbose })
  // Walk the first APM's DAO to its ACL so we can grant the deployer the
  // registrar's CREATE_NAME_ROLE, needed to point `open` at the sub-factory.
  const kernelAddr = await apmDAO.kernel();
  console.log(kernelAddr);
  const aclAddr = await Kernel.at(kernelAddr).acl();
  const acl = ACL.at(aclAddr);
  const role = await ensSubdomainRegistrarBase.CREATE_NAME_ROLE();
  const registrarAddr = await apmDAO.registrar();
  const registrar = ENSSubdomainRegistrar.at(registrarAddr);
  console.log(owner, registrarAddr, role);
  // Best-effort: failures deploying the sub-APM are logged, not fatal
  // (the primary APM above is already deployed at this point).
  try {
    await acl.grantPermission(owner, registrarAddr, role);
    log('Deploying subdomain APM...')
    console.log(await acl.hasPermission(owner, registrar.address, role))
    console.log(subApmFactory.address)
    await registrar.createNameAndPoint(namehash('open'), subApmFactory.address)
    const subReceipt = await subApmFactory.newAPM(namehash('aragonpm.eth'), namehash('open'), owner)
    log('=========')
    const subApmAddr = subReceipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
    console.log(subReceipt.logs);
    console.log(subApmAddr);
  } catch(e) {
    console.log(e);
  }
  if (typeof truffleExecCallback === 'function') {
    // Called directly via `truffle exec`
    truffleExecCallback()
  } else {
    // Programmatic use: hand back the deployed contracts.
    return {
      apmFactory,
      ens,
      apm: APMRegistry.at(apmAddr),
    }
  }
}
/*
"
const namehash = require('eth-ens-namehash').hash
const keccak256 = require('js-sha3').keccak_256
const deployENS = require('@aragon/os/scripts/deploy-test-ens')
const deployDaoFactory = require('@aragon/os/scripts/deploy-daofactory')
const logDeploy = require('@aragon/os/scripts//helpers/deploy-logger')
const getAccounts = require('@aragon/os/scripts//helpers/get-accounts')
const globalArtifacts = this.artifacts // Not injected unless called directly via truffle
const globalWeb3 = this.web3 // Not injected unless called directly via truffle
const ZERO_ADDR = '0x0000000000000000000000000000000000000000'
const defaultOwner = process.env.OWNER
const defaultDaoFactoryAddress = process.env.DAO_FACTORY
const defaultENSAddress = process.env.ENS
module.exports = async (
truffleExecCallback,
{
artifacts = globalArtifacts,
web3 = globalWeb3,
ensAddress = defaultENSAddress,
owner = defaultOwner,
daoFactoryAddress = defaultDaoFactoryAddress,
verbose = true
} = {}
) => {
const log = (...args) => {
if (verbose) { console.log(...args) }
}
const APMRegistry = artifacts.require('APMRegistry')
const Repo = artifacts.require('Repo')
const ENSSubdomainRegistrar = artifacts.require('ENSSubdomainRegistrar')
const DAOFactory = artifacts.require('DAOFactory')
const APMRegistryFactory = artifacts.require('APMRegistryFactory')
const ENS = artifacts.require('ENS')
const tldName = 'eth'
const labelName = 'open.aragonpm'
const tldHash = namehash(tldName)
const labelHash = '0x'+keccak256(labelName)
const apmNode = namehash(`${labelName}.${tldName}`)
let ens
log('Deploying APM...')
const accounts = await getAccounts(web3)
if (!owner) {
owner = accounts[0]
log('OWNER env variable not found, setting APM owner to the provider\'s first account')
}
log('Owner:', owner)
if (!ensAddress) {
log('=========')
log('Missing ENS! Deploying a custom ENS...')
ens = (await deployENS(null, { artifacts, owner, verbose: false })).ens
ensAddress = ens.address
} else {
ens = ENS.at(ensAddress)
}
log('ENS:', ensAddress)
log(`TLD: ${tldName} (${tldHash})`)
log(`Label: ${labelName} (${labelHash})`)
log('=========')
log('Deploying APM bases...')
const apmRegistryBase = await APMRegistry.new()
await logDeploy(apmRegistryBase, { verbose })
const apmRepoBase = await Repo.new()
await logDeploy(apmRepoBase, { verbose })
const ensSubdomainRegistrarBase = await ENSSubdomainRegistrar.new()
await logDeploy(ensSubdomainRegistrarBase, { verbose })
let daoFactory
if (daoFactoryAddress) {
daoFactory = DAOFactory.at(daoFactoryAddress)
const hasEVMScripts = await daoFactory.regFactory() !== ZERO_ADDR
log(`Using provided DAOFactory (with${hasEVMScripts ? '' : 'out' } EVMScripts):`, daoFactoryAddress)
} else {
log('Deploying DAOFactory with EVMScripts...')
daoFactory = (await deployDaoFactory(null, { artifacts, withEvmScriptRegistryFactory: true, verbose: false })).daoFactory
}
log('Deploying APMRegistryFactory...')
const apmFactory = await APMRegistryFactory.new(
daoFactory.address,
apmRegistryBase.address,
apmRepoBase.address,
ensSubdomainRegistrarBase.address,
ensAddress,
'0x00'
)
await logDeploy(apmFactory, { verbose })
log(`Assigning ENS name (${labelName}.${tldName}) to factory...`)
if (await ens.owner(apmNode) === accounts[0]) {
log('Transferring name ownership from deployer to APMRegistryFactory')
await ens.setOwner(apmNode, apmFactory.address)
} else {
log('Creating subdomain and assigning it to APMRegistryFactory')
try {
await ens.setSubnodeOwner(tldHash, labelHash, apmFactory.address)
} catch (err) {
console.error(
`Error: could not set the owner of '${labelName}.${tldName}' on the given ENS instance`,
`(${ensAddress}). Make sure you have ownership rights over the subdomain.`
)
throw err
}
}
log('Deploying APM...')
const receipt = await apmFactory.newAPM(tldHash, labelHash, owner)
log('=========')
const apmAddr = receipt.logs.filter(l => l.event == 'DeployAPM')[0].args.apm
log('# APM:')
log('Address:', apmAddr)
log('Transaction hash:', receipt.tx)
log('=========')
try {
const kernel = await ensSubdomainRegistrarBase.kernel();
const acl = await kernel.acl();
console.log(acl);
let ret = await ensSubdomainRegistrarBase.createNameAndPoint(labelHash, apmAddr).then(console.log).catch(console.log);
console.log(ret);
} catch(e) {
console.log(e);
}
if (typeof truffleExecCallback === 'function') {
// Called directly via `truffle exec`
truffleExecCallback()
} else {
return {
apmFactory,
ens,
apm: APMRegistry.at(apmAddr),
}
}
}
*/

View File

@@ -15,14 +15,13 @@ module.exports = async function(callback) {
console.log(`Using Contribution at: ${kredits.Contribution.contract.address}`);
const table = new Table({
head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?', 'IPFS']
head: ['ID', 'Contributor ID', 'Description', 'Amount', 'Confirmed?', 'Vetoed?', 'Claimed?']
})
try {
let blockNumber = await kredits.provider.getBlockNumber();
let contributions = await kredits.Contribution.all();
console.log(`Current block number: ${blockNumber}`);
contributions.forEach((c) => {
const confirmed = c.confirmedAtBlock <= blockNumber;
@@ -31,17 +30,13 @@ module.exports = async function(callback) {
c.contributorId,
`${c.description}`,
c.amount.toString(),
`${confirmed} (${c.confirmedAtBlock})`,
confirmed,
c.vetoed,
c.claimed,
c.ipfsHash
])
});
console.log(table.toString());
let totalKreditsEarned = await kredits.Contribution.functions.totalKreditsEarned(true);
console.log(`Total confirmed kredits: ${totalKreditsEarned}`);
} catch (err) {
console.log(err);
}

View File

@@ -15,31 +15,23 @@ module.exports = async function(callback) {
console.log(`Using Contributor at: ${kredits.Contributor.contract.address}`);
const table = new Table({
head: ['ID', 'Account', 'Name', 'Core?', 'Balance', 'Kredits earned', 'Contributions count', 'IPFS']
head: ['ID', 'Account', 'Core?', 'Name', 'Balance']
})
try {
const contributors = await kredits.Contributor.all()
contributors.forEach((c) => {
table.push([
c.id.toString(),
c.account,
`${c.name}`,
c.isCore,
c.balanceInt.toString(),
c.totalKreditsEarned.toString(),
c.contributionsCount.toString(),
c.ipfsHash
])
})
console.log(table.toString())
} catch(e) {
callback(e);
return;
}
let contributors = await kredits.Contributor.all()
contributors.forEach((c) => {
table.push([
c.id.toString(),
c.account,
c.isCore,
`${c.name}`,
ethers.utils.formatEther(c.balance)
])
})
console.log(table.toString())
callback()
}

8836
yarn.lock

File diff suppressed because it is too large Load Diff