Commit

Merge pull request #107 from makerdao/staging

staging => prod

rmulhol authored Jul 16, 2020
2 parents af5374c + 419fc76 commit a6bcf75
Showing 123 changed files with 2,354 additions and 3,788 deletions.
3 changes: 3 additions & 0 deletions .dockerignore
@@ -14,3 +14,6 @@ test_config
.travis.yml
vulcanizedb.log
Dockerfile
/tmp/execute_health_check
/tmp/header_sync_health_check
/tmp/connection
15 changes: 2 additions & 13 deletions .travis.yml
@@ -1,7 +1,7 @@
dist: trusty
language: go
go:
- 1.12
- 1.14
services:
- postgresql
- docker
@@ -24,17 +24,6 @@ deploy:
- provider: script
script: bash ./.travis/deploy.sh prod
on:
branch: master
branch: prod
notifications:
email: false
env:
matrix:
secure: Bm0u6E0esyVjJ2SMg5kVKkTJI+q0vSTOlRUFsfYQG6xAPqaxPH3iXiIwtUIuWK/pKwGtMsXDTiLuoFFASdu+AWRto2I9FisgO/foZhnsfHc1wBLOj1PCukwYl1LUPrS21QAigoZ24YQi1PHx1INjjSX+2cAIoSsj1LD74L6GcwxZi4A0yh3Ntmtv5ttn6mtDr3GeKh6iaw2vS9OeHRUb4ARlz+pxgNULyYF8AR5cKp2gDX8SrDCikwjJm/i2fKOTfWSxKD/EenaeHKciBnakDAWGs90EOhNlakM5DGFhgGhGKKL/DTImb56VfQfOAJEgxIdNPSUPUJV6WLBGhRKOawfZem/8bV54j89QfVg2H4wKtU62OA4eT5IRKiyTPh1sk7QfVZAHWDnmjUsThaXIErNIHOwEMU+uaXY1bOgXBYG2dqsE0rOGZ+TxzrIzOVAr/4pU6b2qmI60OrjnX9dgX9jcm79FHiLtBYkuu12MrYUcuOt2n8bsqeZ/GkrcaEL25hQghaDsziWkthqciierTwzXZrleLARARjitv04aOV0/FQrawkxEysQXPVklWv9JVlk63+BGp6Wb5YVSDKfCYetPF9e31nwG8gIrglFpE5hgAycUkA6ms+EOJ+6UrUBRQt5JxCaxJ58n7QHOFscRWGfW90oIQ3jftyscKERMn/c=
global:
- secure: tz62S53XiFomz/R/X0Rl143KLugl/cBCP8C2us5v5qbJhqbVrOUXPKsK6HZThl20S8EqHC/yaXUWrL6+mhTFH3Ee4GILvchMpAHfqOWYZOWtxcgoO/2UcOKzjBORMJK/Oosp/OAYrR86Om1T4P1gatsbIm90dnE6ausBsNxLtCjCr+Ozbx4yDJ37X327o4uvm+0O0+SbxJw8/aVgtNUJDXqNp1CjwtXwrUTH6PbxNiRGgFrZ24lSS0+0ESCJ5GVZSVSWKDUf4ML5gLCA5/JxKooZisUpbbtHPa9CYzbK8jF4eSYbWrg9W3zJ5Xv2+fN7uFBut/DJbg9RWboSo2txWcv/z1Jdao4q0boOgGgYGf8pphKsx2oNVc5ZKgnygAlN1wma7dwddfiTh3u4WB3zzWbjgj0ONhYkMFhKOjMLqNdAdNjOfFUZB2+Xf/FPUKc5MfL0b2GWpEf/sKhJ1xkf8ZpQgbiyCpaDDIQHoGUsrFVQKk8/frEsLWgdJQlZciuNHDThdHntEQx38EcvotKWss5Mc2tcF+HNr32ob0PuQl6aHkuAX9JwR9hYg6UcoG3MTcCta/e4kZ0PmcltXzbvVVM0+NzcQv6vVsBqH3CNY+Uhhd2uIYluescJNhwfK5g+PHDPgQsT0iGJTXDVmR0H658joBTNniYBsWujJ0o6rSg=
- secure: qJYh+sEjGmDb4SyXnHkvu+1JEYExaRDbapwVdfx/U8e+AK7ZNno4wsHEEA8aMZVkNW7JOfsRjx3Dp1B6Rgd/2QH6J40yz7hMbCZWbres7KnBD3ib8iwHT5TeQ+DndNZh5wDGgNkxgyDWB05fdtAVu6fH77dWaiCkvejI/Jsl0HSUyv+B+TeoC4SkgQgfWSiHOdQ6Q47LVgmL7Yu64+AwwGBawqFvwZdy6pYk0nJtmliYxA3Uaucaml2O1U/fR4bYejzpqz6mnJsuSsLjZKuzrjPFwRVtpc1MsKYyX/Vs7DYnETi+fbCqxB6zbJjelsKMsq83tN6Gk+zk49AWPehQ/UAE3HdOdJo31JJ5MUt8DLL5oczDdTWIaRiNa+Oidoemmf+8w1TwUQavgBe3ubMwPtBf/m+jPUDTpCsykYjwQDyPCl8W3vI+rxgXQcwIn9v0JCdcM+lmPaQ0v41WXxEBxbQriYHeOAbRyU9RR68rnvQQ7Ly2yUne2onEASW8dEGepelvk4Wddc7Oa58fOXzWK2pM1g0K2MtR1weLsRQiDrI0IFQqXJQzXsrXLoYkSeU/FP/j9RP21sd7b4gXcnSRuV3Z6PeoAgbGegrrIQm3bnFdxZs47mov8c01cG7ExdhT78dp1TvKe90JI82DYK/aZOEK9jKoT+sbZsHpojVxOJI=
- secure: c9L/ZJY5JmPlDDlwDPh+guvzY07FUAr3Kh6xenWalzbVzRscFyUtaJTFpjaxzQmI4ICWXQh08A7kEpE/7nd87raBaLRj29KkCjHgKWWgV0x/30Uzu15GiKl7BbhiAukRLTT84abtb7KqTxkBsXyGLIn/xBszE9cNPRXiFb84EEGlNNb9saDV+e8fir3quHxdwCWGHzfl/Li8n5pY/v3wIioDnGU20O16QtL0vTChI8jLaZxvdwB52Je8YgzFPEPz0bJbmHHdV892VPI3P6XyWBJ2Dz94a0tOrFUCBDXFG7rZUBOXcJwtJFNn9c8jAGXdaz7BhAGICAHRqcnxf4N/d10I7cZM8IXQXPb6U1T93KUvn8cKyfPGiSRkmxuc8zkrkRPBVzqDuIFvjOfZ+RkmAQF8+3SnjFuMFWrCDUPy/WkU3sa6JbW4cFkMtc03E8zpzVmeB3MxMbwAtO+wohg83/YTNe6l2sBcMVBxn3zfUAn+uen3KJ3xlXShkMgYFYnhJ7Y26M+kMHuZ1uR8t9Htsx7dCOJM/0IPMTKVk4AhB0vzE3h2SMD91EdScYjmGXrr/xCuGxKeJawow3+CAQZlgq5QwUMsYo2yEDrqm7/FEQbilkVEtdi0V15oL9EEnyOiDWnS6Fjp2R+CN7oKtJU2EYbFXTEmlFtuh4KG75Jmn/E=
- secure: kh8o0PUc5zQfvQ1zaX9mKY+EJW5T9PWxGbVTR/ui30yh6UlSjAYHW3dySUT6kqfuZW8CPsSbFQg5DOgSJquYXf2CzHYdb9pUWgX+7bwUCRArndDS84JpgHw8Am9NHfJzegs2+gOX8ul+pbnc4xkYsBaLzh3paIQAr4IZbfqvSfiQiqNKtcAxB2j2ZYBbhxG7pLHmYIp3rB4UUzoMO7jdcPxA2+YxoNJVYjekayk/LFfmpP70BTe9rKCJkyaap+/kl+lAyVR0f1hyRYbwQVOrLRLWga5avYZetFV5XyrqLFwBghfnzDOGWH9J8/iRXWKoWYTO+1G4Cka/74UCqCoYrm5mi/j3GFX0KRjgX7vxcyWjab8uu6L2ictRQq8wP2gE+wuftySDgCtqf2rijsUGFtiA3h2sqUPtKnQD9Zu+e4XnuyHJPtJ1jwviBUgx1N1t5uskx1h5VS+xVG4gc+9zQq3LI+RYOOlVyWfDhK0WxVPbtwH/s9nFibVnNdA2X6fs409toEq9aEbdg9I9Xtz3U6qPYM88BMC6FnrySFFydNxfKCtVldUQmA5Tj3mCexbeTMRS57WmXZLRlWIIisFkTKNYUP9x8RtwOM1OtbiM9HyVs4l7rBIw7iVmidEdb5PYlfFfK7sc32qrQtz5Pr+e9y6ogAMK3u7WEl5EP/ozeOI=
- secure: cmszNhIpQ7j+MKvs9sEUKTBtoJDYb4wh1seFvJ6JhZw8RL3/y1XWPFBdOFtG9rKLgCj3UxT3+S634gkI4bqJUZUbhZ9HyZBEy6E+JkKHadRPaL6+n0ICcak9ZgZLR/pzN1Bt3KG/vpqSDNr2wkykmaDb81RZy/4UIB74UYFrwi+f03jGZf1cKIHh/y8BONnP7mBTrHHweUnleXpUeAJGzDWW/FckkkAQ7s0BR0xM2WnA7FQNz9jDVhNMkhqCZqnebOIdBx6ufVvIR0nW9xI51Qfj9NJCotvH4Wv2PNdXE9i7+yv7b36cMyj6zNmrGM15l452GHbn+d1ZQugaOajGOuj8MV1Rdo+tTv/YSIAm/2+cN1HM0s0b9Q3WMqBZLsyiNcKB4QBIA/8tqKsgtcvIsPdm0AVya+YFImR4xe4rVxgo5KMfJJ/E4FyxgxnVW22uI6zGHrEgxq3XIIIlOyl/fIijTw/C3kouK3YjiDMlkVqYMw4n7zG1/JZqUX4Ji7CbAsMXJQrR3mj3jmPbLTgnnGuUcplFi5ACqdm6OgAYpjGFgQ6JuooY29yjOR3ZdKSFgVEa6EtM+ogw641+tAyC/ejOcIj7TlJqYlTPulDXgi0dsdmnmltnkBYbdAAekdBKCUUh3FQM/CppTLph7qjqLApi2RsxkeWFwucASmjCBJY=
- secure: hwZs74XuEetCC9cHGiLFYzmmGARYBQfON/ikR+ZPlfYIVo5oUyD1urf5DhEyjkQKLWpp+4u5tIH7tfImy3FFX9ZBQk3cnw/SfsFeyMgK0vrGVh3ErVPV9Tk1Ebluf+maF3i7TiU06/+JkqzpBstrazdFuAqjHTjKYzddujqZmXZyw1+HQXdBrsXedKWeXAmrSp9+sQ3TiBqHjOSagQJdAxscTrNKkut1fesn6tycjBAqKw1k6shhQwQwyNo38or8ewEWbCejqPOrE/ONIOVo7HSl1WKWbXFpfu4IStPKUXxXUdM5as3oXOLKp0jmNq9tv8KgygGWTaXOeDX6fr/cSpY23vHCZl6m5e0aiZBW65zQx5Yj1xMiiuyAtIUXSzLZSdYqXoEV2cYfxQFxTkGT2S3l8c+f7ZNhjauPglPwP5mRfZuhln6h6pdVBoGGI152LGOnAqTWwju+0zudvXTbwfZOk2KVb1zysxAleJVEm4hJoxwLeZLVOJn7kEWDjXNT6+M1L6q5zsDyjA418RL/MElDjqDsMVvJXFHB3wGCqr+KhbTz3cjLMifUnMeD52j3hTXUgzmqpzHAS4yrt8M6j8meQxDfO5WHj1NHF6xv5afqjPiPeCJNCnPw2116RnyWWFn5DSa05zmo8Wvi8+IRcCjB0eILfQoWYcwGW9MvlkA=
- secure: XYufEAOLMgd9gwC//F4QuPZCVn/orkpm+eBOAAiKEBpIPTmrV4ZhWYZdFzd18v4/mQA026+hr4W77HHe5eEyP8T2A20hh2r+Zl4Ou20Er2dV+Dja2bVX8GzPTSgKjEkuJgJGTUZX+JfdC2Otf82PO4u+j7/ay8LgKLrPatpqbC8nb5tsQ3NVPj4uXaK8nByjcoAygMMlobsEIA6w45AtvwU+Y/t5m0uyqFyt775Tme5Z7S2ppANMXLgJsDHIi/QOHNRAu27+F7Bi9/6lDhJwN8Nf4f8xsb/CnMqfzgX02h/VoSnerJe1cwFWGnV1g7kcVAACXSSxng+PqVNTf4ll9O4BnEg46KtxK0SkXXlt+gnL8b67PoqoziR+wst0Wwi5XanHNo+ra006bavmH7dkv6bDR8K3JSPAktq50fbdWRG6Z5TsDvLK75GjAFgtI6qLw9u6PKPd8wir0AZOY1XJxzN8GGBLYwN46dzoviHoyCX/Q0uIcQIAem/eVSITnERUx4uv02H8geJ2J+pWrqTYdc+QjV80wgdjOWyPvHt1vj4d1bypMokOuXEcAXdUblr+fP6r5mqjUiIhXfdin8Jo+Pb+bIeQTb+hAfXiQu9hLqa561kRjml1BQDZb9f7oFgCx5ZExfQsOKVmjellOe16BgM4ZjyosXmqC4mkg2GhvtQ=
3 changes: 3 additions & 0 deletions .travis/deploy.sh
@@ -61,6 +61,9 @@ elif [ "$ENVIRONMENT" == "staging" ]; then

message DEPLOYING EXTRACT-DIFFS
aws ecs update-service --cluster vdb-cluster-$ENVIRONMENT --service vdb-extract-diffs-$ENVIRONMENT --force-new-deployment --endpoint https://ecs.$STAGING_REGION.amazonaws.com --region $STAGING_REGION

message DEPLOYING EXTRACT-DIFFS-NEW-GETH
aws ecs update-service --cluster vdb-cluster-$ENVIRONMENT --service vdb-extract-diffs2-$ENVIRONMENT --force-new-deployment --endpoint https://ecs.$STAGING_REGION.amazonaws.com --region $STAGING_REGION
else
message UNKNOWN ENVIRONMENT
fi
78 changes: 33 additions & 45 deletions README.md
@@ -5,7 +5,6 @@

> Vulcanize DB is a set of tools that make it easier for developers to write application-specific indexes and caches for dapps built on Ethereum.

## Table of Contents
1. [Background](#background)
1. [Install](#install)
@@ -15,15 +14,11 @@


## Background
The same data structures and encodings that make Ethereum an effective and trust-less distributed virtual machine
complicate data accessibility and usability for dApp developers. VulcanizeDB improves Ethereum data accessibility by
providing a suite of tools to ease the extraction and transformation of data into a more useful state, including
allowing for exposing aggregate data from a suite of smart contracts.
The same data structures and encodings that make Ethereum an effective and trust-less distributed virtual machine complicate data accessibility and usability for dApp developers. VulcanizeDB improves Ethereum data accessibility by providing a suite of tools to ease the extraction and transformation of data into a more useful state, including allowing for exposing aggregate data from a suite of smart contracts.

VulanizeDB includes processes that sync, transform and expose data. Syncing involves
querying an Ethereum node and then persisting core data into a Postgres database. Transforming focuses on using previously synced data to
query for and transform log event and storage data for specifically configured smart contract addresses. Exposing data is a matter of getting
data from VulcanizeDB's underlying Postgres database and making it accessible.
VulcanizeDB includes processes that extract and transform data.
Extracting involves querying an Ethereum node and persisting returned data into a Postgres database.
Transforming takes that raw data and converts it into domain objects representing data from configured contract accounts.

![VulcanizeDB Overview Diagram](documentation/diagrams/vdb-overview.png)

@@ -38,7 +33,7 @@ data from VulcanizeDB's underlying Postgres database and making it accessible.
- Go 1.12+
- Postgres 11.2
- Ethereum Node
- [Go Ethereum](https://ethereum.github.io/go-ethereum/downloads/) (1.8.23+)
- Vulcanize currently requires a forked version of [Go Ethereum](https://github.com/makerdao/go-ethereum/) (1.8.23+) in order to store storage diffs.
- [Parity 1.8.11+](https://github.com/paritytech/parity/releases)

### Building the project
@@ -54,14 +49,15 @@ Be sure you have enabled Go Modules (`export GO111MODULE=on`), and build the exe

`make build`

If you need to use a different dependency than what is currently defined in `go.mod`, it may helpful to look into [the replace directive](https://github.com/golang/go/wiki/Modules#when-should-i-use-the-replace-directive).
This instruction enables you to point at a fork or the local filesystem for dependency resolution.

If you are running into issues at this stage, ensure that `GOPATH` is defined in your shell.
If necessary, `GOPATH` can be set in `~/.bashrc` or `~/.bash_profile`, depending upon your system.
It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`.
If you need to use a different dependency than what is currently defined in `go.mod`, it may be helpful to look into [the replace directive](https://github.com/golang/go/wiki/Modules#when-should-i-use-the-replace-directive). This directive enables you to point at a fork or the local filesystem for dependency resolution. If you are running into issues at this stage, ensure that `GOPATH` is defined in your shell. If necessary, `GOPATH` can be set in `~/.bashrc` or `~/.bash_profile`, depending upon your system. It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`.
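
If it helps to visualize, a `replace` directive might look roughly like the following; the module paths and versions below are placeholders rather than values taken from this repository's `go.mod`:

```
// go.mod (illustrative only; paths and versions are placeholders)
module github.com/you/your-project

go 1.12

require github.com/ethereum/go-ethereum v1.9.0

// Resolve go-ethereum against the MakerDAO fork instead of upstream:
replace github.com/ethereum/go-ethereum => github.com/makerdao/go-ethereum v1.9.0

// Or, while developing, point at a local checkout on disk:
// replace github.com/ethereum/go-ethereum => ../go-ethereum
```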

### Setting up the database

**IMPORTANT NOTE - PLEASE READ**
If you're using the [MakerDAO VulcanizeDB Transformers](https://github.com/makerdao/vdb-mcd-transformers) you should follow the migration instructions there, and use that repository for maintaining your database schema. If you follow these directions and _then_ add the mcd transformers, you'll need to reset your database using the migrations there.

### Setting up the database for stand-alone users

1. Install Postgres
1. Create a superuser for yourself and make sure `psql --list` works without prompting for a password.
1. `createdb vulcanize_public`
@@ -74,8 +70,7 @@ It can be additionally helpful to add `$GOPATH/bin` to your shell's `$PATH`.

* See below for configuring additional environments
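
As a rough sketch of the database setup steps above (the `make migrate` variable names are assumptions; check the Makefile for the exact invocation):

```
# create the database owned by your Postgres superuser
createdb vulcanize_public

# run the schema migrations (variable names assumed; see the Makefile)
make migrate HOST_NAME=localhost NAME=vulcanize_public PORT=5432
```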

In some cases (such as recent Ubuntu systems), it may be necessary to overcome failures of password authentication from
localhost. To allow access on Ubuntu, set localhost connections via hostname, ipv4, and ipv6 from peer/md5 to trust in: /etc/postgresql/<version>/pg_hba.conf
In some cases (such as recent Ubuntu systems), it may be necessary to overcome failures of password authentication from localhost. To allow access on Ubuntu, set localhost connections via hostname, ipv4, and ipv6 from peer/md5 to trust in: /etc/postgresql/<version>/pg_hba.conf

(It should be noted that trusted auth should only be enabled on systems without sensitive data in them: development and local test databases)
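
For reference, the relevant `pg_hba.conf` entries would end up looking roughly like this after switching the methods to `trust` (exact defaults vary by install):

```
# /etc/postgresql/<version>/pg_hba.conf
# TYPE  DATABASE  USER  ADDRESS        METHOD
local   all       all                  trust
host    all       all   127.0.0.1/32   trust
host    all       all   ::1/128        trust
```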

@@ -101,41 +96,34 @@ localhost. To allow access on Ubuntu, set localhost connections via hostname, ip
- The `ipcPath` should be the endpoint available for your project.

## Usage
As mentioned above, VulcanizeDB's processes can be split into three categories: syncing, transforming and exposing data.

### Data syncing
To provide data for transformations, raw Ethereum data must first be synced into VulcanizeDB.
This is accomplished through the use of the `headerSync` command.
This command is described in detail [here](documentation/data-syncing.md).

### Data transformation
Data transformation uses the raw data that has been synced into Postgres to filter out and apply transformations to
specific data of interest. Since there are different types of data that may be useful for observing smart contracts, it
follows that there are different ways to transform this data. We've started by categorizing this into Generic and
Custom transformers:

- Generic Contract Transformer: Generic contract transformation can be done using a built-in command,
`contractWatcher`, which transforms contract events provided the contract's ABI is available. It also
provides some state variable coverage by automating polling of public methods, with some restrictions.
`contractWatcher` is described further [here](documentation/generic-transformer.md).

- Custom Transformers: In many cases custom transformers will need to be written to provide
more comprehensive coverage of contract data. In this case we have provided the `compose`, `execute`, and
`composeAndExecute` commands for running custom transformers from external repositories. Documentation on how to write,
build and run custom transformers as Go plugins can be found
[here](documentation/custom-transformers.md).
VulcanizeDB's processes can be split into two categories: extracting and transforming data.

### Extracting

Several commands extract raw Ethereum data to Postgres:
- `headerSync` populates block headers into the `public.headers` table - more detail [here](documentation/data-syncing.md).
- `execute` and `composeAndExecute` add configured event logs into the `public.event_logs` table.
- `extractDiffs` pulls state diffs into the `public.storage_diff` table.
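
A sketch of how these extraction commands might be invoked; the binary name and `--config` flag are assumptions based on the build and configuration sections above, so consult each command's help output:

```
# sync block headers into public.headers
./vulcanizedb headerSync --config environments/example.toml

# run configured event transformers, populating public.event_logs
./vulcanizedb execute --config environments/example.toml

# pull state diffs into public.storage_diff
./vulcanizedb extractDiffs --config environments/example.toml
```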

### Exposing the data
[Postgraphile](https://www.graphile.org/postgraphile/) is used to expose GraphQL endpoints for our database schemas; this is described in detail [here](documentation/postgraphile.md).
### Transforming
Data transformation uses the raw data that has been synced into Postgres to filter out and apply transformations to specific data of interest.
Since there are different types of data that may be useful for observing smart contracts, it follows that there are different ways to transform this data.
We've started by categorizing this into Generic and Custom transformers:

- Generic Contract Transformer: Generic contract transformation can be done using a built-in command, `contractWatcher`, which transforms contract events provided the contract's ABI is available.
`contractWatcher` is described further [here](documentation/generic-transformer.md).

- Custom Transformers: In many cases custom transformers will need to be written to provide more comprehensive coverage of contract data.
In this case we have provided the `compose`, `execute`, and `composeAndExecute` commands for running custom transformers from external repositories.
Documentation on how to write, build and run custom transformers as Go plugins can be found [here](documentation/custom-transformers.md).
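
As an illustrative sketch (the config file name is hypothetical, and the exact flags are documented in the pages linked above):

```
# generate and build the plugin from an external transformer repo, then run it
./vulcanizedb compose --config environments/myTransformers.toml
./vulcanizedb execute --config environments/myTransformers.toml

# or do both in one step
./vulcanizedb composeAndExecute --config environments/myTransformers.toml
```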

### Tests
- Replace the empty `ipcPath` in the `environments/testing.toml` with a path to a full node's eth_jsonrpc endpoint (e.g. local geth node ipc path or infura url)
- Note: must be mainnet
- Note: integration tests require configuration with an archival node
- `make test` will run the unit tests and skip the integration tests
- `make integrationtest` will run just the integration tests
- `make test` and `make integrationtest` setup a clean `vulcanize_testing` db
- `make test` and `make integrationtest` both set up a clean `vulcanize_testing` db
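
For example, `environments/testing.toml` might end up containing something like the following; the `[client]` table name is an assumption, so match whatever section the existing file already uses:

```toml
# environments/testing.toml (section name assumed)
[client]
  ipcPath = "https://mainnet.infura.io/v3/<your-project-id>"
```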


## Contributing
@@ -146,4 +134,4 @@ VulcanizeDB follows the [Contributor Covenant Code of Conduct](https://www.contr
For more information on contributing, please see [here](documentation/contributing.md).

## License
[AGPL-3.0](LICENSE) © Vulcanize Inc
64 changes: 64 additions & 0 deletions cmd/backfillEvents.go
@@ -0,0 +1,64 @@
package cmd

import (
	"fmt"

	"github.com/makerdao/vulcanizedb/libraries/shared/logs"
	"github.com/makerdao/vulcanizedb/utils"
	"github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
)

var endingBlockNumber int64

// backfillEventsCmd represents the backfillEvents command
var backfillEventsCmd = &cobra.Command{
	Use:   "backfillEvents",
	Short: "BackFill events from already-checked headers",
	Long: `Fetch and persist events from configured transformers across a range
of headers that may have already been checked for logs. Useful when adding a
new event transformer to an instance that has already been running and marking
headers checked as it queried for the previous (now incomplete) set of logs.`,
	Run: func(cmd *cobra.Command, args []string) {
		SubCommand = cmd.CalledAs()
		LogWithCommand = *logrus.WithField("SubCommand", SubCommand)
		err := backFillEvents()
		if err != nil {
			logrus.Fatalf("error back-filling events: %s", err.Error())
		}
		logrus.Info("completed back-filling events")
	},
}

func init() {
	rootCmd.AddCommand(backfillEventsCmd)
	backfillEventsCmd.Flags().Int64VarP(&endingBlockNumber, "ending-block-number", "e", -1, "last block from which to back-fill events")
	backfillEventsCmd.MarkFlagRequired("ending-block-number")
}

func backFillEvents() error {
	ethEventInitializers, _, _, exportTransformersErr := exportTransformers()
	if exportTransformersErr != nil {
		LogWithCommand.Fatalf("SubCommand %v: exporting transformers failed: %v", SubCommand, exportTransformersErr)
	}

	blockChain := getBlockChain()
	db := utils.LoadPostgres(databaseConfig, blockChain.Node())

	extractor := logs.NewLogExtractor(&db, blockChain)

	for _, initializer := range ethEventInitializers {
		transformer := initializer(&db)
		err := extractor.AddTransformerConfig(transformer.GetConfig())
		if err != nil {
			return fmt.Errorf("error adding transformer: %w", err)
		}
	}

	err := extractor.BackFillLogs(endingBlockNumber)
	if err != nil {
		return fmt.Errorf("error backfilling logs: %w", err)
	}

	return nil
}
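
Based on the flags registered in `init()` above, a hedged example invocation might look like this (the `--config` flag is assumed to be inherited from the root command):

```
# back-fill events for all configured transformers up to block 10,000,000
./vulcanizedb backfillEvents --config environments/example.toml --ending-block-number 10000000
```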
4 changes: 2 additions & 2 deletions cmd/compose.go
@@ -86,8 +86,8 @@ The type of watcher that the transformer works with is specified using the
type variable for each transformer in the config. Currently there are watchers
of event data from an eth node (eth_event) and storage data from an eth node
(eth_storage), and a more generic interface for accepting contract_watcher pkg
based transformers which can perform both event watching and public method
polling (eth_contract).
based transformers which can perform event watching provided only a contract
address (eth_contract).
Transformers of different types can be run together in the same command using a
single config file or in separate command instances using different config files
4 changes: 2 additions & 2 deletions cmd/composeAndExecute.go
@@ -87,8 +87,8 @@ The type of watcher that the transformer works with is specified using the
type variable for each transformer in the config. Currently there are watchers
of event data from an eth node (eth_event) and storage data from an eth node
(eth_storage), and a more generic interface for accepting contract_watcher pkg
based transformers which can perform both event watching and public method
polling (eth_contract).
based transformers which can perform event watching provided only a contract
address (eth_contract).
Transformers of different types can be run together in the same command using a
single config file or in separate command instances using different config files