add script and improve listing creation
* add script: getLogs
* separate application events from assorted events
* rm msgSender from txData, as it's incorrect and misleading
* better error handling
kangarang committed Aug 7, 2018
1 parent 8099b69 commit 7165d53
Showing 6 changed files with 201 additions and 84 deletions.
1 change: 1 addition & 0 deletions package.json
@@ -20,6 +20,7 @@
"test": "react-scripts test --env=jsdom",
"update:tokens": "babel-node scripts/update-tokens",
"update:abis": "babel-node scripts/addABIs",
"getLogs": "babel-node scripts/getLogs",
"storybook": "start-storybook -p 9009 -s public",
"build-storybook": "build-storybook -s public"
},
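This entry exposes the new script through npm: running npm run getLogs invokes babel-node on scripts/getLogs, the same pattern the existing update:tokens and update:abis scripts already use.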
78 changes: 78 additions & 0 deletions scripts/getLogs.js
@@ -0,0 +1,78 @@
import EthAbi from 'ethjs-abi'
import _utils from 'modules/logs/utils'
import { setEthjs } from 'libs/provider'
import { baseToConvertedUnit } from 'libs/units'
import token from './abis/EIP20.json'
import { tsToMonthDate } from '../src/utils/_datetime'

async function getLogs() {
  // setup Ethjs
  const ethjs = await setEthjs()

  // adToken
  const contractAddress = '0xd0d6d6c5fe4a677d343cc433536bb717bae167dd'
  // specify event name(s) we're interested in polling
  const eventNames = ['Transfer']
  // specify indexed event emission args (topics)
  const indexedFilterValues = {
    _to: '0xb4b26709ffed2cd165b9b49eea1ac38d133d7975', // PLCRVoting
    // _from: '0xDEADBEEF'
  }
  // token abi
  const abi = token.abi
  const blockRange = {
    fromBlock: '4000000',
    toBlock: 'latest',
  }

  // get filter
  const filter = await _utils.getFilter(
    contractAddress,
    eventNames,
    indexedFilterValues,
    abi,
    blockRange
  )
  console.log('created a filter.')
  console.log('getting logs...')
  // get raw encoded logs
  const rawLogs = await ethjs.getLogs(filter)
  console.log('decoding..')
  // decode logs
  const decoder = await EthAbi.logDecoder(abi)
  const decodedLogs = await decoder(rawLogs)

  // package the logs into an array of neat objects
  const lawgs = await Promise.all(
    rawLogs.map(async (log, index) => {
      const { block, tx } = await _utils.getBlockAndTxnFromLog(log, ethjs)
      const { _value, _from } = decodedLogs[index]
      return {
        from: _from,
        value: baseToConvertedUnit(_value, '9'),
        txHash: tx.hash,
        blockNumber: block.number.toString(),
        date: tsToMonthDate(block.timestamp.toNumber()),
      }
    })
  )

  // filter the logs by Transfer value
  const filterValue = 5000000
  // whale-watching
  const filteredLogs = lawgs.filter(lawg => lawg.value > filterValue)
  // prettier-ignore
  console.log(`${rawLogs.length} total. ${filteredLogs.length} logs with value greater than ${filterValue}.\n`)

  // print details
  return filteredLogs.forEach(({ from, value, txHash, blockNumber, date }) => {
    console.log('from:', from)
    console.log('value:', value)
    console.log('txHash:', txHash)
    console.log('blockNumber:', blockNumber)
    console.log('date:', date)
    console.log('')
  })
}

getLogs()
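The helper _utils.getFilter is not part of this commit, but ethjs.getLogs ultimately consumes a standard eth_getLogs filter object, so for the Transfer/_to example above the filter presumably looks roughly like the sketch below. The topics encoding is the standard one: topic 0 is the keccak-256 hash of the event signature, and each indexed address is left-padded to 32 bytes. This is illustrative only, not the actual return value of the helper.

// Rough sketch of the kind of filter ethjs.getLogs receives for the example above.
const exampleFilter = {
  address: '0xd0d6d6c5fe4a677d343cc433536bb717bae167dd', // adToken
  fromBlock: '4000000',
  toBlock: 'latest',
  topics: [
    // keccak-256 of 'Transfer(address,address,uint256)'
    '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
    null, // _from: not filtered
    // _to: the PLCRVoting address, left-padded to 32 bytes
    '0x000000000000000000000000b4b26709ffed2cd165b9b49eea1ac38d133d7975',
  ],
}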
2 changes: 2 additions & 0 deletions src/libs/provider.js
@@ -30,6 +30,7 @@ let ethjs = undefined
// return local rpc otherwise
function setProvider() {
  if (
    typeof window !== 'undefined' &&
    typeof window.web3 !== 'undefined' &&
    typeof window.web3.currentProvider !== 'undefined'
  ) {
@@ -42,6 +43,7 @@ function setProvider() {
    // rinkeby infura
    return new Ethjs.HttpProvider(`https://rinkeby.infura.io`)
  }
  return new Ethjs.HttpProvider(`https://mainnet.infura.io`)
}

// set ethjs and return it
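Taken together, the two added lines let the same provider module work both in the browser and under babel-node: when window is undefined the injected-web3 branch is skipped, and the function now ends with a mainnet Infura provider as its final fallback.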
185 changes: 107 additions & 78 deletions src/modules/listings/sagas.js
@@ -1,18 +1,21 @@
import { all, take, takeEvery, fork, call, put, select } from 'redux-saga/effects'
import { delay } from 'redux-saga'
import sortBy from 'lodash/fp/sortBy'

import * as logsTypes from '../logs/types'
import { selectAllListings } from './selectors'
import { selectTCR, selectNetwork, selectAccount } from '../home/selectors'

import * as actions from './actions'
import { baseToConvertedUnit } from 'libs/units'
import {
  updateListings,
  createListing,
  updateAssortedListings,
  findListing,
  // findListing,
} from './utils'
import { delay } from 'redux-saga'

import * as logActions from 'modules/logs/actions'
import { baseToConvertedUnit } from 'libs/units'
import { saveSettings } from 'libs/localStorage'

export default function* rootListingsSaga() {
  yield takeEvery(logsTypes.DECODE_LOGS_SUCCEEDED, handleNewPollLogsSaga)
@@ -34,103 +37,129 @@ export function* listenForApplications() {
// ipfs.infura rate limit: > 6 requests at a time
// workaround: batch the applications and concat results
function* batchCreateListings(applications, listings) {
  const chunkApplications = applications.slice(0, 4)
  console.log('chunkApplications:', chunkApplications)
  console.log('listings:', listings)

  if (chunkApplications.length > 0) {
    const chunkListings = yield all(
      chunkApplications.map(application =>
        createListing(application.logData, application.txData, application.msgSender)
  try {
    const chunkApplications = applications.slice(0, 4)

    if (chunkApplications.length > 0) {
      const chunkListings = yield all(
        chunkApplications.map(application =>
          createListing(application.logData, application.txData)
        )
      )
      if (applications.length > 4) {
        yield call(delay, 400)
      }

      console.log('batching..')
      return yield call(
        batchCreateListings,
        applications.slice(4),
        listings.concat(chunkListings)
      )
    )
    if (applications.length > 4) {
      yield call(delay, 400)
    }
    return yield call(
      batchCreateListings,
      applications.slice(4),
      listings.concat(chunkListings)
    )
    return listings
  } catch (error) {
    console.log('batch create listings error:', error)
  }
  return listings
}

// TODO: check for involved listings (Activities)
// TODO: discard stale listings
export function* handleNewPollLogsSaga(action) {
  try {
    const allListings = yield select(selectAllListings)
    const tcr = yield select(selectTCR)
    const network = yield select(selectNetwork)
    const account = yield select(selectAccount)
    const logs = action.payload

    // filter for application events
    const applicantLogs = logs.filter(log => log.eventName === '_Application')
    const assortedLogs = logs.filter(log => log.eventName !== '_Application')

    if (applicantLogs.length) {
      console.log(applicantLogs.length, '_Application logs:', applicantLogs)
      // create listings
      let listings

      if (network === 'mainnet') {
        // batch for ipfs
        listings = yield call(batchCreateListings, applicantLogs, [])
      } else {
        listings = yield all(
          applicantLogs.map(appLog =>
            createListing(appLog.logData, appLog.txData, appLog.msgSender)
          )
        )
      }
      const sortedApplicantLogs = sortBy([l => l.txData.blockTimestamp], applicantLogs)
      yield call(handleApplicationLogsSaga, sortedApplicantLogs)
    }

    // filter for all other types of events
    const assortedLogs = logs.filter(log => log.eventName !== '_Application')

    if (assortedLogs.length) {
      // update listings
      const applications = yield call(updateListings, listings, allListings)
      // check equality
      if (applications.equals(allListings)) {
        console.log('listings === allListings')
      } else {
        yield put(actions.setListings(applications))
      }
      const sortedAssortedLogs = sortBy([l => l.txData.blockTimestamp], assortedLogs)
      yield call(handleAssortedLogsSaga, sortedAssortedLogs)
    }
  } catch (error) {
    yield put(logActions.pollLogsFailed(error))
  }
}

function* handleApplicationLogsSaga(appLogs) {
  try {
    const allListings = yield select(selectAllListings)
    // console.log(logs.length, '_Application logs:', logs)
    const network = yield select(selectNetwork)

    // create listings
    let listings
    if (network === 'mainnet' && !appLogs[0].logData.listingID) {
      // batch for ipfs
      listings = yield call(batchCreateListings, appLogs, [])
      // console.log('listings:', listings)
    } else {
      listings = yield all(
        appLogs.map(appLog => createListing(appLog.logData, appLog.txData))
      )
    }

    // update listings
    if (assortedLogs.length) {
      console.log(assortedLogs.length, 'assorted logs:', assortedLogs)

      // print: address | numTokens listingID
      // 0xd09cc3bc | 2345 yeehaw
      assortedLogs.forEach(event => {
        const match = findListing(event.logData, allListings)
        if (event.logData.numTokens && match) {
          console.log(
            event.msgSender.slice(0, 10),
            ' | ',
            baseToConvertedUnit(
              event.logData.numTokens,
              tcr.get('tokenDecimals')
            ).toString(),
            match.get('listingID')
          )
        }
    const applications = yield call(updateListings, listings, allListings)
    // check equality to current redux state
    if (applications.equals(allListings)) {
      console.log('applications === allListings')
    } else {
      yield put(actions.setListings(applications))
      // Save settings: persist state
      // lastReadBlkNum: get from the last item in the array of various event logs
      yield call(saveSettings, {
        persistState: true,
        lastReadBlockNumber: appLogs[appLogs.length - 1].txData.blockTimestamp,
      })
    }
  } catch (error) {
    yield put(logActions.pollLogsFailed(error))
  }
}

      const updatedListings = yield call(
        updateAssortedListings,
        assortedLogs,
        allListings,
        account
      )
function* handleAssortedLogsSaga(logs) {
  try {
    const allListings = yield select(selectAllListings)
    const tcr = yield select(selectTCR)
    const account = yield select(selectAccount)
    // console.log(logs.length, 'assorted logs:', logs)

      // check: equality
      if (updatedListings.equals(allListings)) {
        console.log('updatedListings === allListings')
      } else {
        yield put(actions.setListings(updatedListings))
      }
    // print: address | numTokens listingID
    // 0xd09cc3bc | 2345 yeehaw
    // logs.forEach(event => {
    // const match = findListing(event.logData, allListings)
    // if (event.logData.numTokens && match) {
    // console.log(
    // event.txOrigin.slice(0, 10),
    // ' | ',
    // baseToConvertedUnit(
    // event.logData.numTokens,
    // tcr.get('tokenDecimals')
    // ).toString(),
    // match.get('listingID')
    // )
    // }
    // })

    const updatedListings = yield call(updateAssortedListings, logs, allListings, account)

    // check: equality
    if (updatedListings.equals(allListings)) {
      console.log('updatedListings === allListings')
    } else {
      yield put(actions.setListings(updatedListings))
    }
  } catch (error) {
    console.log('error;', error)
    yield put(logActions.pollLogsFailed(error))
  }
}
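Both sagas sort their logs with lodash/fp's sortBy, which takes the iteratees first and the collection second. Sorting ascending by txData.blockTimestamp means the last element is the most recent log, which is why handleApplicationLogsSaga reads lastReadBlockNumber from appLogs[appLogs.length - 1]. A minimal, standalone illustration with made-up data:

// Illustration of the lodash/fp sortBy call used above (sample data only).
import sortBy from 'lodash/fp/sortBy'

const sampleLogs = [
  { eventName: '_Application', txData: { blockTimestamp: 1533650000 } },
  { eventName: '_Application', txData: { blockTimestamp: 1533600000 } },
]
const sorted = sortBy([l => l.txData.blockTimestamp], sampleLogs)
// sorted[0].txData.blockTimestamp === 1533600000 (oldest first)
// sorted[sorted.length - 1] is the newest entry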
16 changes: 12 additions & 4 deletions src/modules/listings/utils.js
@@ -60,10 +60,18 @@ export async function handleMultihash(listingHash, data) {
}

// creates a listing entity from _Application log
// I - decoded log, block/tx info, msgSender
// I - decoded log, block/tx info
// O - listing object
export async function createListing(log, blockTxn, owner) {
  let { listingHash, deposit, appEndDate, listingID, data, _eventName } = log
export async function createListing(logData, blockTxn) {
  let {
    listingHash,
    deposit,
    appEndDate,
    listingID,
    data,
    _eventName,
    applicant,
  } = logData
  if (_eventName !== '_Application') {
    throw new Error('not an application')
  }
@@ -88,7 +96,7 @@
  // starting structure for every listing entity
  const listing = {
    listingHash,
    owner,
    owner: applicant,
    data,
    listingData,
    listingID,
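After this change the listing owner is taken from the decoded event's applicant field rather than being passed in from transaction data. A sketch of the new call shape, with placeholder values only (the real logData and blockTxn come from the decoded poll logs):

// Hypothetical inputs; createListing is the export shown above.
const logData = {
  _eventName: '_Application',
  listingHash: '0x' + '11'.repeat(32),
  deposit: '50000000000000000000',
  appEndDate: '1534000000',
  listingID: 'example.com',
  data: '',
  applicant: '0x' + 'aa'.repeat(20),
}
const blockTxn = { blockTimestamp: 1533600000 }

// owner is now sourced from logData.applicant inside createListing,
// so there is no third msgSender/owner argument anymore:
const listingPromise = createListing(logData, blockTxn)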
3 changes: 1 addition & 2 deletions src/modules/logs/sagas/poll.js
@@ -86,7 +86,7 @@ export function* decodeLogsSaga(action) {
    const decoder = yield call(EthAbi.logDecoder, abi)
    const decodedLogs = yield call(decoder, rawLogs)

    // consolidate: logData, txData, eventName, msgSender
    // consolidate: logData, txData, eventName
    const lawgs = yield all(
      rawLogs.map(async (log, index) => {
        const { block, tx } = await _utils.getBlockAndTxnFromLog(log, ethjs)
@@ -102,7 +102,6 @@
          logData,
          txData,
          eventName: logData._eventName,
          msgSender: tx.from,
        }
      })
    )
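With msgSender dropped, each consolidated log handed to the listings sagas carries just logData, txData, and eventName. A rough sketch of one such object; field names inside txData beyond blockTimestamp are assumptions based on the getLogs script above, and the values are illustrative:

// Assumed shape of one consolidated log after this commit.
const lawg = {
  logData: {
    _eventName: '_Application',
    listingHash: '0x' + '22'.repeat(32),
    applicant: '0x' + 'bb'.repeat(20),
  },
  txData: {
    txHash: '0x' + 'cc'.repeat(32),
    blockNumber: '6100000',
    blockTimestamp: 1533600000,
  },
  eventName: '_Application',
}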
