Compare commits

v2.0.0..v1.2.0

No commits in common. "v2.0.0" and "v1.2.0" have entirely different histories.

6 changed files with 1817 additions and 3064 deletions

README.md

@@ -1,6 +1,6 @@
 # Kredits IPFS Pinner
-This tool pins the IPFS documents of a Kredits organization on any IPFS node.
+This tool pins the IPFS hashes of a Kredits organisation on an IPFS node.
 ## Usage
@@ -9,11 +9,11 @@ config flags, in case it is not running on localhost with default ports.)
 ### With NPX magic
-npx @kredits/ipfs-pinner
+npx @kosmos/kredits-ipfs-pinner
 ### Global install
-npm install -g @kredits/ipfs-pinner
+npm install -g @kosmos/kredits-ipfs-pinner
 kredits-ipfs-pinner
 ### From repo

config/peers.json

@@ -1,6 +0,0 @@
-[
-  {
-    "ID": "QmRw21aC3TroRVdZhrE2Qh4W6PBA67kbE8p3fNfsVcfW8D",
-    "Addrs": ["/dns4/draco.kosmos.org/tcp/4001"]
-  }
-]
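
Note: the deleted config/peers.json above is what the v2.0.0 entry point reads to build its default --bootstrapNode value. A minimal sketch of that lookup, assembled only from lines visible in this compare (the path and values are exactly those shown above):

    const fs = require('fs');

    // v2.0.0 index.js derives the default bootstrap multiaddr from the first peer entry
    const defaultPeers = JSON.parse(fs.readFileSync('./config/peers.json'));
    const bootstrapNode = `${defaultPeers[0].Addrs[0]}/ipfs/${defaultPeers[0].ID}`;

    console.log(bootstrapNode);
    // => /dns4/draco.kosmos.org/tcp/4001/ipfs/QmRw21aC3TroRVdZhrE2Qh4W6PBA67kbE8p3fNfsVcfW8D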

index.js

@@ -1,33 +1,31 @@
 #!/usr/bin/env node
-const fs = require('fs');
-const debug = require('debug')('ipfs-pinner');
-const Kredits = require('@kredits/contracts');
+const Kredits = require('kredits-contracts');
 const IpfsPinner = require('./lib/ipfs-pinner');
-const defaultPeers = JSON.parse(fs.readFileSync('./config/peers.json'));
+const debug = require('debug')('ipfs-pinner');
 const argv = require('yargs')
   .default({
-    rpcUrl: 'https://rsk-testnet.kosmos.org',
+    network: 'rinkeby',
+    apm: 'open.aragonpm.eth',
     host: 'localhost',
     port: '5001',
     protocol: 'http',
-    watch: false,
-    progress: false,
-    bootstrapNode: `${defaultPeers[0].Addrs[0]}/ipfs/${defaultPeers[0].ID}`
+    monitor: true,
+    bootstrapNode: '/ip4/46.4.18.160/tcp/4001/ipfs/QmZ4Lpzhz8bafbTYvEMMCmrbrMM4JfyHDR23WbCSAd9bo7'
   })
-  .boolean('watch')
-  .boolean('progress')
+  .boolean('monitor')
   .describe({
-    rpcUrl: 'Web3/EVM node RPC URL; alternative to --network',
+    network: 'Ethereum network to connect to',
+    rpcUrl: 'Ethereum node RPC URL; alternative to --network',
+    daoAddress: 'Optional Kredits DAO address',
     host: 'IPFS API host',
     port: 'IPFS API port',
     protocol: 'IPFS API protocol',
-    watch: 'Monitor contract events for new IPFS documents',
-    progress: 'Show progress bars',
+    monitor: 'Monitor contract events for new IPFS documents',
     bootstrapNode: 'IPFS node address to connect to before fetching documents'
   })
-  .example('$0 --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events')
+  .example('$0 --network rinkeby --host localhost', 'Pins all existing IPFS documents to the IPFS API running on localhost and monitors for new events.')
   .argv;
 const ipfsConfig = {
@@ -36,48 +34,38 @@ const ipfsConfig = {
   protocol: argv.protocol
 };
-debug(`IPFS node:`, ipfsConfig);
+console.log(`Using IPFS:`, ipfsConfig);
 (async () => {
-  const kredits = await Kredits.for(
-    { rpcUrl: argv.rpcUrl },
-    { ipfsConfig: ipfsConfig }
-  ).init().catch(e => {
-    console.log('Failed to initialize Kredits:');
-    console.log(e.message);
-    process.exit(1);
-  });
-  // TODO redesign IPFS wrapper API and do not use an internal attribute
-  const ipfsApi = kredits.ipfs._ipfsAPI;
-  await ipfsApi.id().catch(e => {
-    console.log('Failed to initialize IPFS:');
-    console.log(e.message);
-    process.exit(1);
-  });
+  try {
+    const kredits = await Kredits.for(
+      { network: argv.network, rpcUrl: argv.rpcUrl },
+      { apm: argv.apm, ipfsConfig: ipfsConfig, addresses: { Kernel: argv.daoAddress } }
+    ).init();
+    // check the connection to the IPFS client
+    // TODO redesign IPFS wrapper API and do not use an internal attribute
+    const ipfsApi = kredits.ipfs._ipfsAPI;
+    await ipfsApi.id();
   debug(`Connecting to known IPFS node ${argv.bootstrapNode}`);
   await ipfsApi.swarm.connect(argv.bootstrapNode);
-  const ipfsPinner = new IpfsPinner(kredits, {
-    progress: argv.progress
-  });
-  await ipfsPinner.pinAll().then(cids => {
-    console.log(`\nSuccessfully pinned ${cids.length} documents`)
+    const ipfsPinner = new IpfsPinner(kredits);
+    ipfsPinner.pinAll().then(pins => {
+      console.log(`Pinned ${pins.length} existing documents`);
   });
-  if (argv.watch) {
-    console.log('\nWatching contract events for new documents...');
-    ipfsPinner.watch(pin => {
+    ipfsPinner.monitor(pin => {
       console.log('Pinned a new document:', pin[0]["hash"]);
     });
-  } else {
-    process.exit(0);
-  }
-  // TODO Add new deployment/DAO/org ID or all contract proxy addresses
-  // console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`);
+    console.log(`Subscribed to DAO: ${kredits.Kernel.contract.address}`);
+  } catch(e) {
+    console.log('Failed to start');
+    console.log(e);
+    process.exit(1);
+  }
 })();
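
The main behavioural change in this file is the startup error handling: v2.0.0 attaches a .catch() handler that reports and exits to each awaited setup call, while v1.2.0 wraps the whole startup in a single try/catch. A condensed sketch of the two patterns, assuming the surrounding async IIFE from index.js; the calls are taken from the diff above, and the two variants are shown side by side for comparison, not meant to run in the same scope:

    // v1.2.0 style: one try/catch around the whole startup sequence
    try {
      const kredits = await Kredits.for({ network: argv.network }, { ipfsConfig: ipfsConfig }).init();
      await kredits.ipfs._ipfsAPI.id();   // check the connection to the IPFS client
    } catch (e) {
      console.log('Failed to start');
      console.log(e);
      process.exit(1);
    }

    // v2.0.0 style: per-step .catch() handlers that report the specific failure and exit
    const kredits = await Kredits.for({ rpcUrl: argv.rpcUrl }, { ipfsConfig: ipfsConfig }).init().catch(e => {
      console.log('Failed to initialize Kredits:');
      console.log(e.message);
      process.exit(1);
    });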

lib/ipfs-pinner.js

@@ -1,62 +1,21 @@
 const debug = require('debug')('ipfs-pinner');
-const cliProgress = require('cli-progress');
-function sleep(ms) {
-  return new Promise(resolve => setTimeout(resolve, ms));
-}
 class IpfsPinner {
-  constructor (kredits, options={}) {
+  constructor (kredits, ipfsApi) {
     this.kredits = kredits;
-    this.ipfsApi = this.kredits.ipfs;
-    this.progressBars = !!options.progress && !process.env.DEBUG;
-    if (this.progressBars) {
-      this.multibar = new cliProgress.MultiBar({
-        stopOnComplete: true,
-        clearOnComplete: false,
-        hideCursor: false,
-        etaBuffer: 30,
-        format: '{entity} [{bar}] {percentage}% | ETA: {eta_formatted} | {value}/{total}'
-      }, cliProgress.Presets.shades_grey);
-    }
+    this.ipfsApi = ipfsApi || this.kredits.ipfs;
   }
   async pinAll () {
-    console.log('Pinning IPFS documents for all known items...\n')
-    const cids = [];
-    const promises = [];
-    const contracts = [
-      this.kredits.Contributor,
-      this.kredits.Contribution,
-      // TODO uncomment once we have data here
-      // this.kredits.Reimbursement
-    ]
-    for (const contract of contracts) {
-      debug(`Pinning data from ${contract.constructor.name}...`);
-      const itemCount = await contract.count;
-      debug(`${contract.constructor.name} item count:`, itemCount);
-      let bar;
-      if (this.progressBars) {
-        bar = this.multibar.create(itemCount, 0);
-        bar.update(0, {entity: `${contract.constructor.name}s`.padEnd(14)});
-      }
-      promises.push(this._pinAllFromContract(contract, itemCount, bar)
-        .then(res => { cids.push(...res); }));
-    }
-    await Promise.all(promises);
-    // Avoid console output race condition with progress bars finishing update
-    if (this.progressBars) await sleep(1000);
-    return cids;
+    const contributorHashes = await this._pinAllFromContract(this.kredits.Contributor);
+    const contributionHashes = await this._pinAllFromContract(this.kredits.Contribution);
+    const proposalHashes = await this._pinAllFromContract(this.kredits.Proposal);
+    return contributorHashes.concat(contributionHashes)
+      .concat(proposalHashes);
   }
-  watch (callback) {
+  monitor (callback) {
     this.kredits.Contribution.on('ContributionAdded', (id) => {
       this.kredits.Contribution.getData(id)
         .then(data => { return this.ipfsApi.pin(data); })
@@ -74,43 +33,19 @@ class IpfsPinner {
     });
   }
-  async _pinAllFromContract (contract, itemCount, progressBar) {
-    const ipfsApi = this.ipfsApi;
-    const progressBars = this.progressBars;
-    const ids = [...Array(itemCount).keys()].map(i => i+1);
-    const cids = [];
-    const batchSize = 20;
-    let position = 0;
-    async function loadAndPin(id) {
-      let cid;
-      try {
-        const data = await contract.getData(id);
-        debug(`Loaded ${contract.constructor.name} #${id}`);
-        cid = await ipfsApi.pin(data);
-        debug(`Pinned ${contract.constructor.name} #${id} at ${cid}`);
-      } catch(e) {
-        debug(`Error while trying to load an pin ${contract.constructor.name} #${id}:`)
-        debug(e);
-        debug(`\nTrying again...`);
-        loadAndPin(id);
-      } finally {
-        cids.push(cid);
-        if (progressBars) { progressBar.increment(); }
-      }
-    }
-    while (position < itemCount) {
-      const batchIds = ids.slice(position, position + batchSize);
-      await Promise.all(batchIds.map(async id => loadAndPin(id)));
-      position += batchSize;
-    }
-    return cids;
+  _pinAllFromContract (contract) {
+    debug(`Pinning data from ${contract.constructor.name}...`);
+    return contract.count.then(count => {
+      let promises = [...Array(count).keys()].map(i => {
+        let id = i + 1; // 0 => 1 - ids start with 1 and not with 0
+        debug(`Loading ${contract.constructor.name} #${id}`);
+        return contract.getData(id).then(data => {
+          debug(`Pinning ${contract.constructor.name} #${id}`);
+          return this.ipfsApi.pin(data);
+        });
+      });
+      return Promise.all(promises);
+    });
   }
 }
 module.exports = IpfsPinner;
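
For orientation, both versions drive this class from index.js in the same way: construct it with an initialized Kredits instance, pin everything once, then subscribe for new documents. A minimal usage sketch with the v2.0.0 method names (v1.2.0 takes an optional ipfsApi as second constructor argument and calls the subscription method monitor instead of watch); `kredits` is assumed to be an already-initialized instance and the snippet is assumed to run inside an async context:

    const IpfsPinner = require('./lib/ipfs-pinner');

    // `kredits` is assumed to be an initialized Kredits instance (see index.js above)
    const ipfsPinner = new IpfsPinner(kredits, { progress: false });

    const cids = await ipfsPinner.pinAll();   // pin all existing documents
    console.log(`Pinned ${cids.length} documents`);

    ipfsPinner.watch(pin => {                 // v1.2.0: ipfsPinner.monitor(...)
      console.log('Pinned a new document:', pin[0]['hash']);
    });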

package-lock.json (generated, 4669 lines changed)

File diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
 {
-  "name": "@kredits/ipfs-pinner",
-  "version": "2.0.0",
+  "name": "@kosmos/kredits-ipfs-pinner",
+  "version": "1.2.0",
   "description": "Pins IPFS data of a Kredits organization on an IPFS node",
   "main": "index.js",
   "scripts": {
@@ -20,10 +20,9 @@
   ],
   "license": "MIT",
   "dependencies": {
-    "@kredits/contracts": "^7.0.0",
-    "cli-progress": "^3.11.2",
-    "debug": "^4.3.4",
-    "yargs": "^17.6.0"
+    "debug": "^4.1.1",
+    "kredits-contracts": "^5.4.0",
+    "yargs": "^13.2.4"
   },
   "repository": {
     "type": "git",