Adapt code for new kredits release/contracts
Also process loading and pinning operations in batches, to prevent stack size errors on RSK nodes.
This commit is contained in:
parent
e12e798735
commit
4a3988137e
|
@ -1,14 +1,14 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const Kredits = require('kredits-contracts');
|
||||
const IpfsPinner = require('./lib/ipfs-pinner');
|
||||
const fs = require('fs');
|
||||
const debug = require('debug')('ipfs-pinner');
|
||||
const Kredits = require('@kredits/contracts');
|
||||
const IpfsPinner = require('./lib/ipfs-pinner');
|
||||
const defaultPeers = JSON.parse(fs.readFileSync('./config/peers.json'));
|
||||
|
||||
const argv = require('yargs')
|
||||
.default({
|
||||
network: 'rinkeby',
|
||||
apm: 'open.aragonpm.eth',
|
||||
rpcUrl: 'https://rsk-testnet.kosmos.org',
|
||||
host: 'localhost',
|
||||
port: '5001',
|
||||
protocol: 'http',
|
||||
|
@ -17,16 +17,14 @@ const argv = require('yargs')
|
|||
})
|
||||
.boolean('monitor')
|
||||
.describe({
|
||||
network: 'Ethereum network to connect to',
|
||||
rpcUrl: 'Ethereum node RPC URL; alternative to --network',
|
||||
daoAddress: 'Optional Kredits DAO address',
|
||||
rpcUrl: 'Web3/EVM node RPC URL; alternative to --network',
|
||||
host: 'IPFS API host',
|
||||
port: 'IPFS API port',
|
||||
protocol: 'IPFS API protocol',
|
||||
monitor: 'Monitor contract events for new IPFS documents',
|
||||
bootstrapNode: 'IPFS node address to connect to before fetching documents'
|
||||
})
|
||||
.example('$0 --network rinkeby --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events.')
|
||||
.example('$0 --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events')
|
||||
.argv;
|
||||
|
||||
const ipfsConfig = {
|
||||
|
@ -40,11 +38,11 @@ console.log(`Using IPFS:`, ipfsConfig);
|
|||
(async () => {
|
||||
try {
|
||||
const kredits = await Kredits.for(
|
||||
{ network: argv.network, rpcUrl: argv.rpcUrl },
|
||||
{ apm: argv.apm, ipfsConfig: ipfsConfig, addresses: { Kernel: argv.daoAddress } }
|
||||
{ rpcUrl: argv.rpcUrl },
|
||||
{ ipfsConfig: ipfsConfig }
|
||||
).init();
|
||||
|
||||
// check the connection to the IPFS client
|
||||
// Check the connection to the IPFS client
|
||||
// TODO redesign IPFS wrapper API and do not use an internal attribute
|
||||
const ipfsApi = kredits.ipfs._ipfsAPI;
|
||||
|
||||
|
@ -63,7 +61,8 @@ console.log(`Using IPFS:`, ipfsConfig);
|
|||
console.log('Pinned a new document:', pin[0]["hash"]);
|
||||
});
|
||||
|
||||
console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`);
|
||||
// TODO Add new deployment/DAO/org ID or all contract proxy addresses
|
||||
// console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`);
|
||||
} catch(e) {
|
||||
console.log('Failed to start');
|
||||
console.log(e);
|
||||
|
|
|
@ -1,5 +1,16 @@
|
|||
const debug = require('debug')('ipfs-pinner');
|
||||
|
||||
async function promiseAllInBatches(task, items, batchSize) {
|
||||
let position = 0;
|
||||
let results = [];
|
||||
while (position < items.length) {
|
||||
const itemsForBatch = items.slice(position, position + batchSize);
|
||||
results = [...results, ...await Promise.allSettled(itemsForBatch.map(item => task(item)))];
|
||||
position += batchSize;
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
class IpfsPinner {
|
||||
constructor (kredits, ipfsApi) {
|
||||
this.kredits = kredits;
|
||||
|
@ -9,10 +20,10 @@ class IpfsPinner {
|
|||
async pinAll () {
|
||||
const contributorHashes = await this._pinAllFromContract(this.kredits.Contributor);
|
||||
const contributionHashes = await this._pinAllFromContract(this.kredits.Contribution);
|
||||
const proposalHashes = await this._pinAllFromContract(this.kredits.Proposal);
|
||||
// const proposalHashes = await this._pinAllFromContract(this.kredits.Proposal);
|
||||
|
||||
return contributorHashes.concat(contributionHashes)
|
||||
.concat(proposalHashes);
|
||||
return contributorHashes.concat(contributionHashes);
|
||||
// .concat(proposalHashes);
|
||||
}
|
||||
|
||||
monitor (callback) {
|
||||
|
@ -33,19 +44,25 @@ class IpfsPinner {
|
|||
});
|
||||
}
|
||||
|
||||
_pinAllFromContract (contract) {
|
||||
async _pinAllFromContract (contract) {
|
||||
debug(`Pinning data from ${contract.constructor.name}...`);
|
||||
return contract.count.then(count => {
|
||||
let promises = [...Array(count).keys()].map(i => {
|
||||
let id = i + 1; // 0 => 1 - ids start with 1 and not with 0
|
||||
debug(`Loading ${contract.constructor.name} #${id}`);
|
||||
return contract.getData(id).then(data => {
|
||||
debug(`Pinning ${contract.constructor.name} #${id}`);
|
||||
return this.ipfsApi.pin(data);
|
||||
});
|
||||
const count = await contract.count;
|
||||
debug('Item count:', count);
|
||||
const ids = [...Array(count).keys()].map(i => i+1);
|
||||
const cids = [];
|
||||
|
||||
async function loadAndPin (id) {
|
||||
debug(`Loading ${contract.constructor.name} #${id}`);
|
||||
return contract.getData(id).then(data => {
|
||||
debug(`Pinning ${contract.constructor.name} #${id}`);
|
||||
return this.ipfsApi.pin(data).then(cid => cids.push(cid));
|
||||
});
|
||||
return Promise.all(promises);
|
||||
});
|
||||
}
|
||||
|
||||
await promiseAllInBatches(loadAndPin.bind(this), ids, 100);
|
||||
|
||||
return cids;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = IpfsPinner;
|
||||
|
|
Loading…
Reference in New Issue