Adapt code for new kredits release/contracts

Also process loading and pinning functions in batches, to prevent stack
size errors on RSK nodes.
This commit is contained in:
Râu Cao 2022-11-01 15:02:18 +01:00
parent e12e798735
commit 4a3988137e
Signed by: raucao
GPG Key ID: 15E65F399D084BA9
2 changed files with 42 additions and 26 deletions

23
index.js Normal file → Executable file
View File

@ -1,14 +1,14 @@
#!/usr/bin/env node #!/usr/bin/env node
const Kredits = require('kredits-contracts'); const fs = require('fs');
const IpfsPinner = require('./lib/ipfs-pinner');
const debug = require('debug')('ipfs-pinner'); const debug = require('debug')('ipfs-pinner');
const Kredits = require('@kredits/contracts');
const IpfsPinner = require('./lib/ipfs-pinner');
const defaultPeers = JSON.parse(fs.readFileSync('./config/peers.json')); const defaultPeers = JSON.parse(fs.readFileSync('./config/peers.json'));
const argv = require('yargs') const argv = require('yargs')
.default({ .default({
network: 'rinkeby', rpcUrl: 'https://rsk-testnet.kosmos.org',
apm: 'open.aragonpm.eth',
host: 'localhost', host: 'localhost',
port: '5001', port: '5001',
protocol: 'http', protocol: 'http',
@ -17,16 +17,14 @@ const argv = require('yargs')
}) })
.boolean('monitor') .boolean('monitor')
.describe({ .describe({
network: 'Ethereum network to connect to', rpcUrl: 'Web3/EVM node RPC URL; alternative to --network',
rpcUrl: 'Ethereum node RPC URL; alternative to --network',
daoAddress: 'Optional Kredits DAO address',
host: 'IPFS API host', host: 'IPFS API host',
port: 'IPFS API port', port: 'IPFS API port',
protocol: 'IPFS API protocol', protocol: 'IPFS API protocol',
monitor: 'Monitor contract events for new IPFS documents', monitor: 'Monitor contract events for new IPFS documents',
bootstrapNode: 'IPFS node address to connect to before fetching documents' bootstrapNode: 'IPFS node address to connect to before fetching documents'
}) })
.example('$0 --network rinkeby --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events.') .example('$0 --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events')
.argv; .argv;
const ipfsConfig = { const ipfsConfig = {
@ -40,11 +38,11 @@ console.log(`Using IPFS:`, ipfsConfig);
(async () => { (async () => {
try { try {
const kredits = await Kredits.for( const kredits = await Kredits.for(
{ network: argv.network, rpcUrl: argv.rpcUrl }, { rpcUrl: argv.rpcUrl },
{ apm: argv.apm, ipfsConfig: ipfsConfig, addresses: { Kernel: argv.daoAddress } } { ipfsConfig: ipfsConfig }
).init(); ).init();
// check the connection to the IPFS client // Check the connection to the IPFS client
// TODO redesign IPFS wrapper API and do not use an internal attribute // TODO redesign IPFS wrapper API and do not use an internal attribute
const ipfsApi = kredits.ipfs._ipfsAPI; const ipfsApi = kredits.ipfs._ipfsAPI;
@ -63,7 +61,8 @@ console.log(`Using IPFS:`, ipfsConfig);
console.log('Pinned a new document:', pin[0]["hash"]); console.log('Pinned a new document:', pin[0]["hash"]);
}); });
console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`); // TODO Add new deployment/DAO/org ID or all contract proxy addresses
// console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`);
} catch(e) { } catch(e) {
console.log('Failed to start'); console.log('Failed to start');
console.log(e); console.log(e);

View File

@ -1,5 +1,16 @@
const debug = require('debug')('ipfs-pinner'); const debug = require('debug')('ipfs-pinner');
/**
 * Run an async task over `items` in sequential batches so that at most
 * `batchSize` tasks are in flight at once (the commit message says this
 * prevents stack-size errors on RSK nodes when pinning many documents).
 *
 * @param {Function} task - async function invoked once per item
 * @param {Array} items - items to process
 * @param {number} batchSize - maximum number of concurrent tasks per batch
 * @returns {Promise<Array>} settlement objects from Promise.allSettled, in
 *   the same order as `items`. NOTE: rejections are captured as
 *   `{ status: 'rejected', reason }` entries, never thrown — callers that
 *   care about failures must inspect the results.
 */
async function promiseAllInBatches(task, items, batchSize) {
  const results = [];
  for (let position = 0; position < items.length; position += batchSize) {
    const batch = items.slice(position, position + batchSize);
    const settled = await Promise.allSettled(batch.map(item => task(item)));
    // Append in place; the original re-spread the whole accumulator each
    // iteration ([...results, ...settled]), which is O(n²) overall.
    results.push(...settled);
  }
  return results;
}
class IpfsPinner { class IpfsPinner {
constructor (kredits, ipfsApi) { constructor (kredits, ipfsApi) {
this.kredits = kredits; this.kredits = kredits;
@ -9,10 +20,10 @@ class IpfsPinner {
async pinAll () { async pinAll () {
const contributorHashes = await this._pinAllFromContract(this.kredits.Contributor); const contributorHashes = await this._pinAllFromContract(this.kredits.Contributor);
const contributionHashes = await this._pinAllFromContract(this.kredits.Contribution); const contributionHashes = await this._pinAllFromContract(this.kredits.Contribution);
const proposalHashes = await this._pinAllFromContract(this.kredits.Proposal); // const proposalHashes = await this._pinAllFromContract(this.kredits.Proposal);
return contributorHashes.concat(contributionHashes) return contributorHashes.concat(contributionHashes);
.concat(proposalHashes); // .concat(proposalHashes);
} }
monitor (callback) { monitor (callback) {
@ -33,19 +44,25 @@ class IpfsPinner {
}); });
} }
_pinAllFromContract (contract) { async _pinAllFromContract (contract) {
debug(`Pinning data from ${contract.constructor.name}...`); debug(`Pinning data from ${contract.constructor.name}...`);
return contract.count.then(count => { const count = await contract.count;
let promises = [...Array(count).keys()].map(i => { debug('Item count:', count);
let id = i + 1; // 0 => 1 - ids start with 1 and not with 0 const ids = [...Array(count).keys()].map(i => i+1);
debug(`Loading ${contract.constructor.name} #${id}`); const cids = [];
return contract.getData(id).then(data => {
debug(`Pinning ${contract.constructor.name} #${id}`); async function loadAndPin (id) {
return this.ipfsApi.pin(data); debug(`Loading ${contract.constructor.name} #${id}`);
}); return contract.getData(id).then(data => {
debug(`Pinning ${contract.constructor.name} #${id}`);
return this.ipfsApi.pin(data).then(cid => cids.push(cid));
}); });
return Promise.all(promises); }
});
await promiseAllInBatches(loadAndPin.bind(this), ids, 100);
return cids;
} }
} }
module.exports = IpfsPinner; module.exports = IpfsPinner;