File Changes
    "baseUrl": "http://localhost:8082",
    "template": "testnet2"
  },
-
  "elasticNode": "http://localhost:9200",
+
  "elastic": {
+
    "node": "http://localhost:9200",
+
    "indexPrefix": "seiza"
+
  },
  "storageProcessor": "seiza-elastic",
  "server": {
    "port": 8080,
  },
  "checkTipSeconds": 15,
  "rollbackBlocksCount": 25,
-
  "maxBlockBatchSize": 800,
+
  "maxBlockBatchSize": 900,
  "defaultNetwork": "testnet2",
  "defaultBridgeUrl": "http://localhost:8082",
  "networks": {
+
// flow-typed signature: d313afea12fc960c924825fc728d6a90
+
// flow-typed version: c6154227d1/bunyan_v1.x.x/flow_>=v0.104.x
+

                      
+
declare module 'bunyan' {
+
  declare var TRACE: 10;
+
  declare var DEBUG: 20;
+
  declare var INFO: 30;
+
  declare var WARN: 40;
+
  declare var ERROR: 50;
+
  declare var FATAL: 60;
+

                      
+
  declare type BunyanLogLevels =
+
    | 60 // fatal
+
    | 50 // error
+
    | 40 // warn
+
    | 30 // info
+
    | 20 // debug
+
    | 10; // trace
+
  declare type BunyanRecord = {
+
    [key: string]: any,
+
    v: number,
+
    level: BunyanLogLevels,
+
    name: string,
+
    hostname: string,
+
    pid: string,
+
    time: Date,
+
    msg: string,
+
    src: string,
+
    err?: {
+
      message: string,
+
      name: string,
+
      code: any,
+
      signal: any,
+
      stack: string,
+
      ...
+
    },
+
    ...
+
  };
+
  declare type Writable = { write(rec: BunyanRecord): void, ... };
+
  declare class Logger extends events$EventEmitter {
+
    constructor(options: LoggerOptions): any;
+
    addStream(stream: Stream): void;
+
    addSerializers(serializers: Serializers): void;
+
    child(opts?: LoggerOptions, simple?: boolean): Logger;
+
    reopenFileStreams(): void;
+
    level(): string | number;
+
    level(value: number | string): void;
+
    levels(name: number | string, value: number | string): void;
+
    trace(...params: Array<void>): boolean;
+
    trace(error: Error, format?: any, ...params: Array<any>): void;
+
    trace(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    trace(obj: Object, format?: any, ...params: Array<any>): void;
+
    trace(format: string, ...params: Array<any>): void;
+
    debug(...params: Array<void>): boolean;
+
    debug(error: Error, format?: any, ...params: Array<any>): void;
+
    debug(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    debug(obj: Object, format?: any, ...params: Array<any>): void;
+
    debug(format: string, ...params: Array<any>): void;
+
    info(...params: Array<void>): boolean;
+
    info(error: Error, format?: any, ...params: Array<any>): void;
+
    info(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    info(obj: Object, format?: any, ...params: Array<any>): void;
+
    info(format: string, ...params: Array<any>): void;
+
    warn(...params: Array<void>): boolean;
+
    warn(error: Error, format?: any, ...params: Array<any>): void;
+
    warn(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    warn(obj: Object, format?: any, ...params: Array<any>): void;
+
    warn(format: string, ...params: Array<any>): void;
+
    error(...params: Array<void>): boolean;
+
    error(error: Error, format?: any, ...params: Array<any>): void;
+
    error(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    error(obj: Object, format?: any, ...params: Array<any>): void;
+
    error(format: string, ...params: Array<any>): void;
+
    fatal(...params: Array<void>): boolean;
+
    fatal(error: Error, format?: any, ...params: Array<any>): void;
+
    fatal(buffer: Buffer, format?: any, ...params: Array<any>): void;
+
    fatal(obj: Object, format?: any, ...params: Array<any>): void;
+
    fatal(format: string, ...params: Array<any>): void;
+
    static stdSerializers: {
+
      req: (
+
        req: http$ClientRequest<>
+
      ) => {
+
        method: string,
+
        url: string,
+
        headers: mixed,
+
        remoteAddress: string,
+
        remotePort: number,
+
        ...
+
      },
+
      res: (
+
        res: http$IncomingMessage<>
+
      ) => {
+
        statusCode: number,
+
        header: string,
+
        ...
+
      },
+
      err: (
+
        err: Error
+
      ) => {
+
        message: string,
+
        name: string,
+
        stack: string,
+
        code: string,
+
        signal: string,
+
        ...
+
      },
+
      ...
+
    };
+
  }
+
  declare interface LoggerOptions {
+
    streams?: Array<Stream>;
+
    level?: BunyanLogLevels | string;
+
    stream?: stream$Writable;
+
    serializers?: Serializers;
+
    src?: boolean;
+
  }
+
  declare type Serializers = { [key: string]: (input: any) => mixed, ... };
+
  declare type Stream = {
+
    type?: string,
+
    level?: number | string,
+
    path?: string,
+
    stream?: stream$Writable | tty$WriteStream | Stream | Writable,
+
    closeOnExit?: boolean,
+
    period?: string,
+
    count?: number,
+
    ...
+
  };
+
  declare var stdSerializers: Serializers;
+
  declare function resolveLevel(value: number | string): number;
+
  declare function createLogger(
+
    options: LoggerOptions & { name: string, ... }
+
  ): Logger;
+
  declare class RingBuffer extends events$EventEmitter {
+
    constructor(options: RingBufferOptions): any;
+
    writable: boolean;
+
    records: Array<any>;
+
    write(record: BunyanRecord): void;
+
    end(record?: any): void;
+
    destroy(): void;
+
    destroySoon(): void;
+
  }
+
  declare interface RingBufferOptions {
+
    limit: number;
+
  }
+
  declare function safeCycles(): (key: string, value: any) => any;
+
  declare class ConsoleRawStream {
+
    write(rec: BunyanRecord): void;
+
  }
+
  declare var levelFromName: {
+
    trace: typeof TRACE,
+
    debug: typeof DEBUG,
+
    info: typeof INFO,
+
    warn: typeof WARN,
+
    error: typeof ERROR,
+
    fatal: typeof FATAL,
+
    ...
+
  };
+
  declare var nameFromLevel: { [key: BunyanLogLevels]: string, ... };
+
  declare var VERSION: string;
+
  declare var LOG_VERSION: string;
+
}
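A minimal usage sketch for the libdef above, matching the `import type { Logger } from 'bunyan'` lines introduced in the files below (the logger name and level are illustrative):

// @flow
import bunyan from 'bunyan'
import type { Logger } from 'bunyan'

const logger: Logger = bunyan.createLogger({ name: 'importer', level: 'debug' })
logger.info('bunyan logger is now Flow-typed')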
// @flow

                      
+
import type { Logger } from 'bunyan'
+

                      
import cbor from 'cbor'
import bs58 from 'bs58'
import blake from 'blakejs'
import { injectable, decorate, inject } from 'inversify'

                      
import {
-
  Logger, RawDataProvider, StorageProcessor, NetworkConfig,
+
  RawDataProvider, StorageProcessor, NetworkConfig,
} from '../interfaces'
import SERVICE_IDENTIFIER from '../constants/identifiers'
import utils from '../blockchain/utils'
const GET_UTXOS_BLOCKS_COUNT = sql.select()
  .field('(select count(*) from utxos ) + ( select count(*) from blocks) as cnt')

                      
-

                      
-
const utxoOnConflictUpdateBlockNum = (query) => {
-
  // squel don't support 'EXCLUDED'
-
  // workaround taken from https://github.com/hiddentao/squel/issues/342
-
  const onConflictClause = ' ON CONFLICT (utxo_id) DO UPDATE SET block_num = EXCLUDED.block_num'
-
  const queryParam = query.toParam()
-
  queryParam.text += onConflictClause
-
  return queryParam
-
}
-

                      
export default {
  sql,
-
  utxoOnConflictUpdateBlockNum,
  UTXOS_INSERT,
  GET_BEST_BLOCK_NUM,
  BEST_BLOCK_UPDATE,
// @flow

                      
import _ from 'lodash'
+
import type { Logger } from 'bunyan'
import { helpers } from 'inversify-vanillajs-helpers'

                      
import type {
  Scheduler,
  RawDataProvider,
-
  Logger,
  StorageProcessor,
} from '../interfaces'
import SERVICE_IDENTIFIER from '../constants/identifiers'

                      
  storageProcessor: StorageProcessor

                      
-
  #logger: any
+
  logger: Logger

                      
  checkTipMillis: number

                      
    this.maxBlockBatchSize = maxBlockBatchSize
    logger.debug('Checking tip every', checkTipSeconds, 'seconds')
    logger.debug('Rollback blocks count', rollbackBlocksCount)
-
    this.#logger = logger
+
    this.logger = logger
    this.blocksToStore = []
    this.lastBlock = null
  }

                      
  async rollback(atBlockHeight: number) {
-
    this.#logger.info(`Rollback at height ${atBlockHeight} to ${this.rollbackBlocksCount} blocks back.`)
+
    this.logger.info(`Rollback at height ${atBlockHeight} to ${this.rollbackBlocksCount} blocks back.`)
    // reset scheduler state
    this.blocksToStore = []
    this.lastBlock = null

                      
    // Recover database state to newest actual block.
    const { height } = await this.storageProcessor.getBestBlockNum()
    const rollBackTo = height - this.rollbackBlocksCount
-
    this.#logger.info(`Current DB height at rollback time: ${height}. Rolling back to: ${rollBackTo}`)
+
    this.logger.info(`Current DB height at rollback time: ${height}. Rolling back to: ${rollBackTo}`)
    await this.storageProcessor.rollbackTo(rollBackTo)
    const { epoch, hash } = await this.storageProcessor.getBestBlockNum()
    this.lastBlock = { epoch, hash }
  }

                      

                      
  async processEpochId(id: number, height: number) {
-
    this.#logger.info(`processEpochId: ${id}, ${height}`)
+
    this.logger.info(`processEpochId: ${id}, ${height}`)
    const omitEbb = true
    const blocks = await this.#dataProvider.getParsedEpochById(id, omitEbb)
    for (const block of blocks) {
      && block.epoch === this.lastBlock.epoch
      && block.prevHash !== this.lastBlock.hash) {
      const lastBlockHash = this.lastBlock ? this.lastBlock.hash : ''
-
      this.#logger.info(`(${block.epoch}/${String(block.slot)}) block.prevHash (${block.prevHash}) !== lastBlock.hash (${lastBlockHash}). Performing rollback...`)
+
      this.logger.info(`(${block.epoch}/${String(block.slot)}) block.prevHash (${block.prevHash}) !== lastBlock.hash (${lastBlockHash}). Performing rollback...`)
      return STATUS_ROLLBACK_REQUIRED
    }
    this.lastBlock = {
    }

                      
    if (flushCache || block.height % LOG_BLOCK_PARSED_THRESHOLD === 0) {
-
      this.#logger.debug(`Block parsed: ${block.hash} ${block.epoch} ${String(block.slot)} ${block.height}`)
+
      this.logger.debug(`Block parsed: ${block.hash} ${block.epoch} ${String(block.slot)} ${block.height}`)
    }
    return BLOCK_STATUS_PROCESSED
  }

                      
  async checkTip() {
-
    this.#logger.info('checkTip: checking for new blocks...')
+
    this.logger.info('checkTip: checking for new blocks...')
    // local state
    const { height, epoch, slot } = await this.storageProcessor.getBestBlockNum()

                      
    const tipStatus = nodeTip.local
    const remoteStatus = nodeTip.remote
    if (!tipStatus) {
-
      this.#logger.info('cardano-http-bridge not yet synced')
+
      this.logger.info('cardano-http-bridge not yet synced')
      return
    }
-
    this.#logger.debug(`Last imported block ${height}. Node status: local=${tipStatus.slot} remote=${remoteStatus.slot} packedEpochs=${packedEpochs}`)
+
    this.logger.debug(`Last imported block ${height}. Node status: local=${tipStatus.slot} remote=${remoteStatus.slot} packedEpochs=${packedEpochs}`)
    const [remEpoch, remSlot] = remoteStatus.slot
    if (epoch < remEpoch) {
      // If local epoch is lower than the current network tip
          for (const epochId of _.range(epoch, packedEpochs)) {
            // Process epoch
            await this.processEpochId(epochId, height)
-
            this.#logger.debug(`Epoch parsed: ${epochId}, ${height}`)
+
            this.logger.debug(`Epoch parsed: ${epochId}, ${height}`)
          }
        } else {
          // Packed epoch is not available yet
-
          this.#logger.info(`cardano-http-bridge has not yet packed stable epoch: ${epoch} (lastRemStableEpoch=${lastRemStableEpoch})`)
+
          this.logger.info(`cardano-http-bridge has not yet packed stable epoch: ${epoch} (lastRemStableEpoch=${lastRemStableEpoch})`)
        }
        return
      }
      blockHeight++, i++) {
      const status = await this.processBlockHeight(blockHeight)
      if (status === STATUS_ROLLBACK_REQUIRED) {
-
        this.#logger.info('Rollback required.')
+
        this.logger.info('Rollback required.')
        await this.rollback(blockHeight)
        return
      }
    }
  }

                      
  async startAsync() {
-
    this.#logger.info('Scheduler async: starting chain syncing loop')
+
    this.logger.info('Scheduler async: starting chain syncing loop')
    const currentMillis = () => new Date().getTime()
    const sleep = millis => new Promise(resolve => setTimeout(resolve, millis))
    for (;;) {
        const meta = ERROR_META[e.name]
        if (meta) {
          errorSleep = meta.sleep
-
          this.#logger.warn(`Scheduler async: failed to check tip :: ${meta.msg}. Sleeping and retrying (err_sleep=${errorSleep})`)
+
          this.logger.warn(`Scheduler async: failed to check tip :: ${meta.msg}. Sleeping and retrying (err_sleep=${errorSleep})`)
        } else {
          throw e
        }
      }
      const millisEnd = currentMillis()
      const millisPassed = millisEnd - millisStart
-
      this.#logger.debug(`Scheduler async: loop finished (millisPassed=${millisPassed})`)
+
      this.logger.debug(`Scheduler async: loop finished (millisPassed=${millisPassed})`)
      const millisSleep = errorSleep || (this.checkTipMillis - millisPassed)
      if (millisSleep > 0) {
-
        this.#logger.debug('Scheduler async: sleeping for', millisSleep)
+
        this.logger.debug('Scheduler async: sleeping for', millisSleep)
        await sleep(millisSleep)
      }
    }

                      
import ElasticData, { coinFormat } from './elastic-data'
import type { UtxoType } from './utxo-data'
-
import UtxoData from './utxo-data'
+
import TxData from './tx-data'

                      
class BlockData extends ElasticData {
  block: Block

                      
  utxos: Array<mixed>

                      
-
  blockUtxos: Array<{id: string}>
-

                      
  storedUTxOs: Array<UtxoType>

                      
  allUtxos: {}

                      
  constructor(block: Block, storedUTxOs: Array<UtxoType> = []) {
    super()
+
    this.inputsData = []
    this.block = block
    this.storedUTxOs = storedUTxOs
    const txs = block.getTxs()
-
    this.blockUtxos = txs.flatMap(tx => tx.outputs.map(
-
      (out, idx) => (new UtxoData({
-
        tx_hash: tx.id,
-
        tx_index: idx,
-
        receiver: out.address,
-
        amount: out.value,
-
      })).toPlainObject(),
-
    ))
+

                      
    this.allUtxos = _.keyBy([
      ...this.storedUTxOs,
-
      ...this.blockUtxos,
    ], u => `${u.tx_hash}${u.io_ordinal}`)
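    // allUtxos is keyed by `${tx_hash}${io_ordinal}`, which lets the inputs below be
    // resolved with the matching `${inp.txId}${inp.idx}` key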

                      
-
    this.inputsData = _.flatMap(txs, 'inputs')
-
      .flatMap(inp => this.allUtxos[`${inp.txId}${inp.idx}`])
-
  }
-

                      
-
  getBlockUtxos(): Array<{id: string}> {
-
    return this.blockUtxos
+
    if (!_.isEmpty(txs)) {
+
      this.inputsData = _.flatMap(txs, 'inputs')
+
        .flatMap(inp => this.allUtxos[`${inp.txId}${inp.idx}`])
+
    }
  }

                      
  getReceivedAmount(): number {
    return sent
  }

                      
+
  getTxsData() {
+
    const txs = this.block.getTxs()
+
    return txs.map(tx => (new TxData(tx, this.allUtxos)).toPlainObject())
+
  }
+

                      
  getFees(): number {
    const sentAmount = this.getSentAmount()
    const receivedAmount = this.getReceivedAmount()
    const time = this.block.getTime().toISOString()
    let sent = 0
    let fees = 0
-
    if (this.block.getTxs().length > 0) {
+
    const txs = this.block.getTxs()
+
    if (txs.length > 0) {
      sent = this.getSentAmount()
      fees = this.getFees()
    }
      height: this.block.height,
      time,
      branch: 0,
-
      tx_num: this.block.txs.length,
+
      tx_num: txs.length,
+
      tx: this.getTxsData(),
      sent: coinFormat(sent),
      fees: coinFormat(fees),
    }
// @flow

                      
import _ from 'lodash'
+
import type { Logger } from 'bunyan'

                      
import { helpers } from 'inversify-vanillajs-helpers'
import { Client } from '@elastic/elasticsearch'

                      
-
import type { StorageProcessor, Logger, NetworkConfig } from '../../interfaces'
+
import type { StorageProcessor, NetworkConfig } from '../../interfaces'
import type { Block } from '../../blockchain'
import type { BlockInfoType } from '../../interfaces/storage-processor'
import SERVICE_IDENTIFIER from '../../constants/identifiers'
import UtxoData, { getTxInputUtxoId } from './utxo-data'
import TxData from './tx-data'

                      
-
const INDEX_SLOT = 'seiza.slot'
-
const INDEX_TX = 'seiza.tx'
-
const INDEX_TXIO = 'seiza.txio'
+
const INDEX_SLOT = 'slot'
+
const INDEX_TX = 'tx'
+
const INDEX_TXIO = 'txio'
+
const INDEX_CHUNK = 'chunk'
+
const INDEX_POINTER_ALL = '*'
+

                      
+

                      
+
const ELASTIC_TEMPLATES = {
+
  seiza_tx: {
+
    index_patterns: ['seiza*.tx'],
+
    mappings: {
+
      properties: {
+
        addresses: {
+
          type: 'nested',
+
        },
+
      },
+
    },
+
  },
+
}
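// note: the 'nested' mapping keeps each entry of `addresses` as its own sub-document,
// so address-level queries do not cross-match fields from different entries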
+

                      
+
type ElasticConfigType = {
+
  node: string,
+
  indexPrefix: string,
+
}
+

                      
+
type ChunkBodyType = {
+
  chunk: number,
+
  blocks: number,
+
  txs: number,
+
  txios: number,
+
}
+

                      
+

                      
+
type FormatBulkUploadOptionsType = {
+
  index: string,
+
  getId?: (any) => string,
+
  getData: (any) => {},
+
}
+

                      
+
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms))
+

                      
+
const formatBulkUploadBody = (objs: any,
+
  options: FormatBulkUploadOptionsType) => objs.flatMap(o => [
+
  {
+
    index: {
+
      _index: options.index,
+
      _id: options.getId !== undefined ? options.getId(o) : o.getId(),
+
    },
+
  },
+
  options.getData(o),
+
])
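// Usage sketch (hypothetical call site): the flat action/document pairs produced
// above are the shape the Elasticsearch bulk API expects, e.g.
//   const body = formatBulkUploadBody(utxos, {
//     index: this.indexFor(INDEX_TXIO),
//     getData: (u) => u.toPlainObject(),
//   })
//   await this.client.bulk({ refresh: 'true', body })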

                      
class ElasticStorageProcessor implements StorageProcessor {
  logger: Logger

                      
  networkStartTime: number

                      
+
  elasticConfig: ElasticConfigType
+

                      
  constructor(
    logger: Logger,
-
    elasticNode: string,
+
    elasticConfig: ElasticConfigType,
    networkConfig: NetworkConfig,
  ) {
    this.logger = logger
-
    this.client = new Client({ node: elasticNode })
+
    this.elasticConfig = elasticConfig
+
    this.client = new Client({ node: elasticConfig.node })
    this.networkStartTime = networkConfig.startTime()
  }

                      
+
  indexFor(name: string) {
+
    // TODO: memoize
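    // with the default config this maps e.g. 'tx' -> 'seiza.tx',
    // which matches the 'seiza*.tx' pattern in ELASTIC_TEMPLATES above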
+
    return `${this.elasticConfig.indexPrefix}.${name}`
+
  }
+

                      
+
  async rollbackTo(height: number) {
+
    await sleep(10000)
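    // (assumption) the fixed 10s pause gives in-flight bulk writes time to land
    // before the latest sealed chunk is read back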
+
    const latestStableChunk = await this.getLatestStableChunk()
+
    return this.deleteChunksAfter(Math.min(latestStableChunk, height))
+
  }
+

                      
+

                      
+
  async esSearch(params: {}) {
+
    const resp = await this.client.search(params)
+
    const { hits } = resp.body
+
    return hits
+
  }
+

                      
+
  async getLatestStableChunk() {
+
    const index = this.indexFor(INDEX_CHUNK)
+
    const indexExists = (await this.client.indices.exists({
+
      index,
+
    })).body
+
    if (!indexExists) {
+
      return 0
+
    }
+
    const hits = await this.esSearch({
+
      index,
+
      allowNoIndices: true,
+
      ignoreUnavailable: true,
+
      body: {
+
        sort: [{ chunk: { order: 'desc' } }],
+
        size: 1,
+
      },
+
    })
+
    this.logger.debug('getLatestStableChunk', hits)
+
    return hits.total.value > 0 ? hits.hits[0]._source.chunk : 0
+
  }
+

                      
+
  async deleteChunksAfter(chunk: number) {
+
    const resp = await this.client.deleteByQuery({
+
      index: this.indexFor(INDEX_POINTER_ALL),
+
      body: {
+
        query: { range: { _chunk: { gt: chunk } } },
+
      },
+
    })
+
    const deletedDocs = resp.body.total
+
    this.logger.info(`deleteChunksAfter(${chunk}), total deleted:${deletedDocs}`, resp)
+
  }
+

                      
+
  async ensureElasticTemplates() {
+
    for (const [name, tmpl] of _.toPairs(ELASTIC_TEMPLATES)) {
+
      // eslint-disable-next-line no-await-in-loop
+
      const tmplExists = await this.client.indices.existsTemplate({
+
        name,
+
      })
+
      if (!tmplExists.body) {
+
        // eslint-disable-next-line no-await-in-loop
+
        const resp = await this.client.indices.putTemplate({
+
          name,
+
          body: tmpl,
+
          include_type_name: false,
+
        })
+
        this.logger.debug(`Put template ${name}`, resp)
+
      }
+
    }
+
  }
+

                      
+
  async storeChunk(chunkBody: ChunkBodyType) {
+
    return this.client.index({
+
      index: this.indexFor(INDEX_CHUNK),
+
      id: chunkBody.chunk,
+
      body: chunkBody,
+
    })
+
  }
+

                      
+
  async removeUnsealed() {
+
    const lastChunk = await this.getLatestStableChunk()
+
    this.logger.debug('Remove unsealed blocks after', lastChunk)
+
    if (lastChunk > 0) {
+
      await this.deleteChunksAfter(lastChunk)
+
    }
+
  }
+

                      
  async genesisLoaded() {
+
    await this.ensureElasticTemplates()
+
    await this.removeUnsealed()
+
    const index = this.indexFor(INDEX_TX)
+
    const indexExists = (await this.client.indices.exists({
+
      index,
+
    })).body
+
    if (!indexExists) {
+
      return false
+
    }
    const esResponse = await this.client.cat.count({
-
      index: INDEX_TX,
+
      index,
      format: 'json',
    })
    this.logger.debug('Check elastic whether genesis loaded...', esResponse)
    return Number(esResponse.body[0].count) > 0
  }

                      
  async storeGenesisUtxos(utxos: Array<UtxoType>) {
+
    // TODO: check bulk upload response
+

                      
+
// @flow
+

                      
+
import UtxoData from './utxo-data'
+
import type { UtxoType } from './utxo-data'
+

                      
+
const INPUT_TYPE = 'input'
+

                      
+
class InputData extends UtxoData {
+
  constructor(input, index, inputUtxo, tx) {
+
    super({
+
      tx_hash: tx.id,
+
      tx_index: index,
+
      amount: inputUtxo.value.full,
+
      receiver: inputUtxo.address,
+
    })
+
    this.type = INPUT_TYPE
+
  }
+
}
+

                      
+
export default InputData
import type { TxType } from '../../blockchain'

                      
import ElasticData from './elastic-data'
-
import type UtxoData from './utxo-data'
+
import UtxoData from './utxo-data'
+
import InputData from './input-data'

                      
class TxData extends ElasticData {
  tx: TxType

                      
-
  constructor(tx: TxType) {
+
  inputsUtxos: {}
+

                      
+
  constructor(tx: TxType, inputsUtxos: {} = {}) {
    super()
    this.tx = tx
+
    this.inputsUtxos = inputsUtxos
  }

                      
-
  static fromGenesisUtxo(utxo: UtxoData, networkStartTime: number) {
+
  static fromGenesisUtxo(utxo: any, networkStartTime: number) {
    return new TxData({
      isGenesis: true,
      blockHash: null,
      txOrdinal: 0,
      txTime: new Date(networkStartTime * 1000),
      witnesses: [],
-
      id: utxo.getHash(),
+
      id: utxo.tx_hash,
      branch: 0,
      outputs: [
-
        utxo.toPlainObject(),
+
        utxo,
      ],
    })
  }

                      
+
  getOutputsData() {
+
    return this.tx.outputs.map((utxo, idx) => (new UtxoData({
+
      receiver: utxo.address,
+
      amount: utxo.value,
+
      tx_index: idx,
+
      tx_hash: this.tx.id,
+
    })).toPlainObject())
+
  }
+

                      
+
  getInputsData() {
+
    return this.tx.inputs.map((inp, idx) => {
+
      const inputUtxo = this.inputsUtxos[`${inp.txId}${inp.idx}`]
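      // inputsUtxos is keyed by tx hash + output ordinal (see BlockData.allUtxos),
      // so the spent output is resolved from txId plus the input's index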
+
      return (new InputData(inp, idx, inputUtxo, this.tx)).toPlainObject()
+
    })
+
  }
+

                      
  toPlainObject() {
    return {
      ...TxData.getBaseFields(),
      is_genesis: this.tx.isGenesis || false,
      hash: this.tx.id,
-
      outputs: this.tx.outputs,
+
      outputs: this.getOutputsData(),
+
      inputs: this.getInputsData(),
      time: this.tx.txTime.toISOString(),
    }
  }
      tx_hash: this.utxo.tx_hash,
      branch: 0,
      tx_ordinal: 0,
-
      io_ordinal: 0,
+
      io_ordinal: this.utxo.tx_index,
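      // io_ordinal now carries the output's index within its tx (was hard-coded 0),
      // keeping the tx_hash+io_ordinal lookup keys unique per output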
      address: this.utxo.receiver,
      value: coinFormat(Number(this.utxo.amount)),
    }
    this.#logger = logger
  }

                      
-
  nonAvvmBalancesToUtxos(nonAvvmBalances) {
+
  nonAvvmBalancesToUtxos(nonAvvmBalances: []) {
    this.#logger.debug('nonAvvmBalances to utxos')
    return _.map(nonAvvmBalances, (amount, receiver) => {
      const utxoHash = generateUtxoHash(receiver)
      return utils.structUtxo(receiver, amount, utxoHash)
    })
  }

                      
-
  avvmDistrToUtxos(avvmDistr, protocolMagic) {
+
  avvmDistrToUtxos(avvmDistr: [], protocolMagic: number) {
    this.#logger.debug('avvmDistrToUtxos called.')
    const settings = Cardano.BlockchainSettings.from_json({
      protocol_magic: protocolMagic,

                      
export interface Database {
  getBestBlockNum(): any;
+
  storeUtxos(utxos: Array<mixed>): Promise<any>;

                      
  getConn(): any;

                      
// @flow

                      
export interface RawDataParser {
-
  parseBlock(data: string): any;
-
  parseEpoch(data: string): any;
+
  parseBlock(data: Buffer): any;
+
  parseEpoch(data: Buffer): any;
}
export interface RawDataProvider {
  getBlock(id: string): Promise<string>;
  getEpoch(id: number): Promise<string>;
+
  postSignedTx(payload: string): Promise<any>;
}

                      
export type BlockInfoType = {
  height: number,
-
  epoch?: number,
+
  epoch: number,
  slot?: number,
  hash?: string,
}

                      
export interface StorageProcessor {
  getBestBlockNum(): Promise<BlockInfoType>;

                      
-
  storeBlocksData(blocks: Array<Block>): Promise<void>
+
  storeBlocksData(blocks: Array<Block>): Promise<void>;
+

                      
+
  rollbackTo(height: number): Promise<void>;
}

                      
import config from 'config'

                      
-
const getNetworkConfig = (networkName): {} => {
+
type NetworkConfigType = {
+
  bridgeUrl: string,
+
  genesis: string,
+
  startTime: number,
+
  networkMagic: number,
+
}
+

                      
+
const getNetworkConfig = (networkName: string): NetworkConfigType => {
  const network = { ...config.get('networks')[networkName] }
  network.bridgeUrl = network.bridgeUrl || config.get('defaultBridgeUrl')
  return network