Transaction

2e15fc678212e3ca9fbf2205d6aab511e67b8f5e534372441022c37e7ad2321e
253,285
2019-07-21 09:22:48
1
7,272 B

1 Output

Total Output:
  • OP_RETURN (decoded pushes):
      meta  (Metanet protocol tag)
      14RYgCVd947G54a4wFd4NWw2vtETfuRLBv  (node address)
      4c080e02c029c060209e175dc9861238de82cde0885bbb0647b82d735702cf20  (parent node txid)
      19HxigV4QyBv3tHpQVcUEQyq1pzZVdoAut  (B:// file prefix)
      clone.ts  (file name; contents below)

    File contents (clone.ts):

import fs from 'fs';
import path from 'path';
import readline from 'readline-promise';
import fetch from 'node-fetch';
import BitIndexSDK from 'bitindex-sdk';

/**
 * Clones the directory structure represented by the transaction id to the local file system.
 * Note that at this stage it doesn't create the .bsvpush directory or the metanet.json file.
 */
export class Clone {
  metanetApiUrl = 'http://localhost:3000';
  bFileType = '19HxigV4QyBv3tHpQVcUEQyq1pzZVdoAut';
  bitindex = new BitIndexSDK();

  async clone(txid: string) {
    const node = await this.getMetanetNode(txid);
    this.cloneRecursive(node, process.cwd());
  }

  async cloneRecursive(node, dir) {
    if (node.nodeType == this.bFileType) {
      const filePath = path.join(dir, node.name);
      console.log(`Cloning file: ${filePath}`)
      const data = await this.getFileData(node.nodeTxId);
      fs.writeFileSync(filePath, data);
    } else {
      // Create a directory for this node
      const newDir = path.join(dir, node.name);
      if (!fs.existsSync(newDir)) {
        fs.mkdirSync(newDir);
      }
      // Get children
      const children = await this.getChildNodes(node.nodeTxId);
      for (const child of children) {
        await this.cloneRecursive(child, newDir)
      }
    }
  }

  async getMetanetNode(txid: string) {
    //return (await (await fetch(this.metanetApiUrl + '/tx/' + txid)).json())
    const query = {
      "q": {
        "find": { "node.tx": txid },
        "project": { "node": 1, "out": 1, "out.s4": 1, "out.s8": 1, "parent": 1 }
      }
    }
    const b64 = Buffer.from(JSON.stringify(query)).toString('base64')
    const url = "https://metanaria.planaria.network/q/" + b64
    const response = await fetch(url, { headers: { key: '1DzNX2LzKrmoyYVyqMG46LLknzSd7TUYYP' } })
    const json = await response.json()
    const metanet = json.metanet[0]
    const metanetNode = {
      nodeTxId: txid,
      nodeKey: metanet.node.a,
      nodeType: metanet.out[0].s4,
      name: metanet.out[0].s8,
      parentTxId: null,
      parentKey: null
    }
    if (metanet.out[0].s8) {
      metanetNode.name = metanet.out[0].s8
    } else {
      metanetNode.name = metanet.out[0].s4
    }
    if (metanet.parent) {
      metanetNode.parentTxId = metanet.parent.tx
      metanetNode.parentKey = metanet.parent.a
    }
    return metanetNode;
  }

  async getChildNodes(txid: string) {
    //return (await (await fetch(this.metanetApiUrl + '/tx/' + txid + '/children')).json()).children
    const query = {
      "q": {
        "find": { "parent.tx": txid },
        "project": { "node": 1, "out": 1, "out.s4": 1, "out.s8": 1 }
      }
    }
    const b64 = Buffer.from(JSON.stringify(query)).toString('base64') //btoa(JSON.stringify(query))
    const url = "https://metanaria.planaria.network/q/" + b64
    const response = await fetch(url, { headers: { key: '1DzNX2LzKrmoyYVyqMG46LLknzSd7TUYYP' } })
    const json = await response.json()
    const children = []
    for (const metanet of json.metanet) {
      const metanetNode = {
        nodeTxId: metanet.node.tx,
        nodeKey: metanet.node.a,
        nodeType: metanet.out[0].s4,
        name: metanet.out[0].s8,
      }
      if (metanet.out[0].s8) {
        metanetNode.name = metanet.out[0].s8
      } else {
        metanetNode.name = metanet.out[0].s4
      }
      children.push(metanetNode)
    }
    return children;
  }

  /*
  async confirmOverwrite(filePath): Promise<boolean> {
    let result = true;
    if (fs.existsSync(filePath)) {
      const rlp = readline.createInterface({input: process.stdin, output: process.stdout});
      const response = await rlp.questionAsync(`The file "${filePath}" exists. \nDo you want to overwrite it? \n(Y/n)`);
      rlp.close();
      result = response !== 'n' && response !== 'N';
    }
    return result;
  }
  */

  async getFileData(txid) {
    const metanetNode = {
      txid: txid,
      parts: [],
      publicKey: '',
      parentTx: '',
      type: '',
      data: '',
      mediaType: '',
      encoding: '',
      name: ''
    };
    const result = await this.bitindex.tx.get(txid)
    // Get the opReturn
    const vout = result.vout.find(vout => 'scriptPubKey' in vout && vout.scriptPubKey.type == 'nulldata')
    if (vout) {
      metanetNode.parts = this.parseOpReturn(vout.scriptPubKey.hex)
      // Verify OP_RETURN
      if (metanetNode.parts[0].toLowerCase() != '6a') throw 'Script of type nulldata is not an OP_RETURN'
      // Verify metanet tag
      if (this.fromHex(metanetNode.parts[1]) != 'meta') throw 'OP_RETURN is not of type metanet'
      metanetNode.publicKey = this.fromHex(metanetNode.parts[2])
      metanetNode.parentTx = this.fromHex(metanetNode.parts[3])
      metanetNode.type = this.fromHex(metanetNode.parts[4])
      if (metanetNode.type == '19HxigV4QyBv3tHpQVcUEQyq1pzZVdoAut') {
        // Interpret B file
        metanetNode.data = this.fromHex(metanetNode.parts[5])
        metanetNode.mediaType = this.fromHex(metanetNode.parts[6])
        metanetNode.encoding = this.fromHex(metanetNode.parts[7])
        metanetNode.name = this.fromHex(metanetNode.parts[8])
      } else {
        metanetNode.name = metanetNode.type
      }
    }
    return metanetNode.data
  }

  fromHex(s: string): string {
    return Buffer.from(s, 'hex').toString('utf8');
  }

  // Returns each part as hex string (e.g. 'abcdef')
  parseOpReturn(hex) {
    let parts = []
    // First part is op return
    parts.push(hex[0] + hex[1])
    let index = 2;
    while (index < hex.length) {
      // Get the length
      let lengthHex = hex[index] + hex[index + 1]
      index += 2
      // Convert length to decimal
      let length = parseInt(lengthHex, 16)
      if (length == 76) {
        // Next 1 byte contains the length
        lengthHex = hex.substring(index, index + 2)
        length = parseInt(lengthHex, 16)
        index += 2
      } else if (length == 77) {
        // Next 2 bytes contains the length, little endian
        lengthHex = ''
        for (let i = 0; i < 2; i++) {
          lengthHex = hex[index + i * 2] + hex[index + i * 2 + 1] + lengthHex
        }
        length = parseInt(lengthHex, 16)
        index += 4
      } else if (length == 78) {
        // Next 4 bytes contains the length, little endian
        lengthHex = ''
        for (let i = 0; i < 4; i++) {
          lengthHex = hex[index + i * 2] + hex[index + i * 2 + 1] + lengthHex
        }
        length = parseInt(lengthHex, 16)
        index += 8
      }
      let data = ''
      // Read in data
      for (let i = 0; i < length; i++) {
        data += hex[index] + hex[index + 1]
        index += 2
      }
      parts.push(data)
    }
    return parts
  }
}

export const clone = new Clone();
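A minimal usage sketch follows, assuming the on-chain file above has been saved locally as clone.ts next to a driver script and its dependencies (node-fetch, bitindex-sdk, readline-promise) are installed; the driver itself is not part of the on-chain data.

// Hypothetical driver script, not part of the transaction.
import { clone } from './clone';

// Clone the Metanet directory tree rooted at this transaction into the current
// working directory. Note that Clone.clone() starts cloneRecursive() without
// awaiting it, so the returned promise resolves before every file is written.
clone.clone('2e15fc678212e3ca9fbf2205d6aab511e67b8f5e534372441022c37e7ad2321e')
  .catch(err => console.error('Clone failed:', err));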
    https://whatsonchain.com/tx/2e15fc678212e3ca9fbf2205d6aab511e67b8f5e534372441022c37e7ad2321e
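An aside on parseOpReturn: a script byte below 0x4c is itself the push length, while 0x4c, 0x4d and 0x4e (OP_PUSHDATA1/2/4) mean the length follows in the next 1, 2 or 4 little-endian bytes; a push the size of this file's contents would use the OP_PUSHDATA2 form. A small sketch of the decoding on a hand-built script, assuming clone.ts above is available locally; the hex string is illustrative and not taken from this transaction.

import { clone } from './clone';

// Illustrative script: 6a = OP_RETURN, 04 = push 4 bytes, 6d657461 = 'meta' in hex.
const hex = '6a046d657461';

const parts = clone.parseOpReturn(hex);  // ['6a', '6d657461']
console.log(parts);
console.log(clone.fromHex(parts[1]));    // 'meta'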