Added config options for disabling specific components as well

Marco van Dijk 2022-03-07 23:33:45 +01:00
parent 8792113e08
commit 1db2a934a2
4 changed files with 49 additions and 17 deletions

View File

@@ -30,6 +30,8 @@ Orchestrators can be inspected by clicking on their address, showing all of thei
 - Returns a JSON object of the current data on the given Orchestrator
 - https://www.nframe.nl/livepeer/getOrchestrator
 - Returns a JSON object of the default Orchestrator selected in the backend
+- https://www.nframe.nl/livepeer/prometheus/0x847791cbf03be716a7fe9dc8c9affe17bd49ae5e
+- Returns a Prometheus compatible output of all data except blockchain Events
 
 # How
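A quick way to try the new endpoint next to the existing JSON calls (a minimal sketch, assuming Node 18+ so the global fetch is available; the URLs are the ones listed in the README above):

```js
// Pull the Prometheus exposition text and one of the JSON endpoints.
const base = 'https://www.nframe.nl/livepeer';

(async () => {
  // Prometheus-compatible metrics for the example Orchestrator address above
  const prom = await fetch(`${base}/prometheus/0x847791cbf03be716a7fe9dc8c9affe17bd49ae5e`);
  console.log((await prom.text()).split('\n').slice(0, 5).join('\n'));

  // JSON data for the default Orchestrator configured in the backend
  const orch = await fetch(`${base}/getOrchestrator`);
  console.log(await orch.json());
})();
```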
@@ -113,7 +115,20 @@ Note that running your own backend is not required, since you can also use the e
 Nonetheless, running your own backend is recommended so you can make changes as well as keep things quick if you are far away from Western Europe, where nframe.nl is hosted
 
 ## Dependencies
-nodejs, npm, pm2
+nodejs, npm, pm2, mongodb
+
+### Standalone for Prometheus/Grafana
+The backend can be run in 'simple' mode. In this mode the entire API keeps functioning except for the /getEvents API call. This is perfect for using the API just to pull data as JSON or via the Prometheus endpoint, and it requires much less configuration to set up.
+
+Example prometheus.yml entry for accessing the API using Prometheus:
+  - job_name: 'livepeer'
+    scheme: https
+    scrape_interval: 30s
+    metrics_path: /api/livepeer/prometheus/0x847791cbf03be716a7fe9dc8c9affe17bd49ae5e
+    static_configs:
+      - targets: ['nframe.nl']
+
+### MongoDB Cloud
+If not running the backend in simple mode, the free tier of MongoDB Cloud Services is recommended in order to store blockchain Events persistently. Otherwise the backend has to parse all blockchain events each time it boots.
 
 ## Initial Config
 
 Download copy of repository and change directory to it
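On the MongoDB Cloud note above: MONGO_URI just has to point at the cluster. A minimal connectivity sketch, mirroring the mongoose.connect call the backend itself makes when not in simple mode; the Atlas URI (user, password, cluster host, database name) is a placeholder:

```js
// Standalone connectivity check against a MongoDB Cloud (Atlas) cluster.
// Replace the placeholders in the URI with your own cluster credentials.
// The connect options are copied from the backend (mongoose 5.x style).
const mongoose = require('mongoose');

const MONGO_URI = process.env.MONGO_URI
  || 'mongodb+srv://<user>:<password>@<cluster>.mongodb.net/<database>';

(async () => {
  await mongoose.connect(MONGO_URI, { useNewUrlParser: true, useFindAndModify: false });
  console.log('MongoDB connected');
  await mongoose.disconnect();
})();
```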
@@ -122,7 +137,7 @@ Download copy of repository and change directory to it
 Download all external dependencies we are using
 - `npm install`
-
 Set configuration variables
 - `nano /var/www/backend/src/config.js`
 - `PORT` can be left as is, defines at what port the backend will accept API requests
@@ -135,6 +150,10 @@ Set configuration variables
 - `API_L2_HTTP` should be edited to the HTTP url of a L2 ethereum node
 - `API_L2_WS` should be edited to the WS url of an Arbitrum mainnet Alchemy node
 - `CONF_DEFAULT_ORCH` should be edited to the public address of your own Orchestrator. This is the default Orchestrator the backend will return
+- `CONF_SIMPLE_MODE` if set to true, disables live smart contract monitoring as well as all MONGO related functionality. The config flags `API_L2_WS`, `MONGO_URI`, `MONGO_URI_DEV` and `MONGO_URI_LOCAL` can be ignored if this flag is set to true
+- `CONF_TIMEOUT_CMC` time in milliseconds for which CoinMarketCap data is kept in the cache before a new request is made. Around 5-6 minutes is recommended in order to stay below the default daily soft cap
+- `CONF_TIMEOUT_ALCHEMY` time in milliseconds for which blockchain information, like gas prices, is kept in the cache
+- `CONF_TIMEOUT_LIVEPEER` time in milliseconds for which Livepeer data, like orchestrator and delegator information, is kept in the cache
 
 ## Developing
 Open the directory of your backend and run
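All of the values walked through above are simply destructured from process.env in src/config.js (the next file in this commit), so instead of editing config.js they can also be supplied as environment variables, for example through a pm2 ecosystem file. A hedged sketch only: the script path and the example values are assumptions, not repo defaults.

```js
// ecosystem.config.js — hypothetical pm2 config passing the backend its settings.
// Environment values arrive as strings, so only set the boolean flags
// (CONF_SIMPLE_MODE, CONF_DISABLE_SYNC, CONF_DISABLE_DB) when you want them on.
module.exports = {
  apps: [{
    name: 'livepeer-backend',
    script: '/var/www/backend/src/index.js',   // assumed entry point
    env: {
      API_L2_HTTP: 'https://arb1.arbitrum.io/rpc',                 // example L2 RPC url
      CONF_DEFAULT_ORCH: '0x847791cbf03be716a7fe9dc8c9affe17bd49ae5e',
      CONF_SIMPLE_MODE: 'true',                                    // skip contract monitoring + MongoDB
      CONF_TIMEOUT_CMC: 360000,
      CONF_TIMEOUT_ALCHEMY: 2000,
      CONF_TIMEOUT_LIVEPEER: 60000
    }
  }]
};
```

With such a file in place, `pm2 start ecosystem.config.js` would boot the backend in simple mode.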

View File

@@ -16,5 +16,7 @@ export const {
   CONF_SIMPLE_MODE = false,
   CONF_TIMEOUT_CMC = 360000,
   CONF_TIMEOUT_ALCHEMY = 2000,
-  CONF_TIMEOUT_LIVEPEER = 60000
+  CONF_TIMEOUT_LIVEPEER = 60000,
+  CONF_DISABLE_SYNC = false,
+  CONF_DISABLE_DB = false
 } = process.env;
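A side note on this pattern (an aside, not part of the diff): the defaults above only apply when a variable is completely unset, and anything that does come from the environment is a string, so even setting CONF_DISABLE_DB=false counts as "disable". A tiny illustration:

```js
// Destructuring defaults only kick in for undefined properties,
// and process.env values are always strings.
process.env.CONF_DISABLE_DB = 'false';
const { CONF_DISABLE_DB = false } = process.env;
console.log(CONF_DISABLE_DB);   // 'false' — a non-empty string, hence truthy
console.log(!CONF_DISABLE_DB);  // false — so the `if (!CONF_DISABLE_DB)` guards skip the DB writes
```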

View File

@@ -5,7 +5,8 @@ const apiRouter = express.Router();
 import {
   API_CMC, API_L1_HTTP, API_L2_HTTP, API_L2_WS,
   CONF_DEFAULT_ORCH, CONF_SIMPLE_MODE, CONF_TIMEOUT_CMC,
-  CONF_TIMEOUT_ALCHEMY, CONF_TIMEOUT_LIVEPEER
+  CONF_TIMEOUT_ALCHEMY, CONF_TIMEOUT_LIVEPEER, CONF_DISABLE_SYNC,
+  CONF_DISABLE_DB
 } from "../config";
 // Do API requests to other API's
 const https = require('https');
@@ -101,13 +102,16 @@ const getBlock = async function (blockNumber) {
   // Else get it and cache it
   const thisBlock = await web3layer2.eth.getBlock(blockNumber);
   console.log("Caching new block " + thisBlock.number + " mined at " + thisBlock.timestamp);
   const blockObj = {
     blockNumber: thisBlock.number,
     blockTime: thisBlock.timestamp
   };
   blockCache.push(blockObj);
-  const dbObj = new Block(blockObj);
-  await dbObj.save();
+  if (!CONF_DISABLE_DB) {
+    const dbObj = new Block(blockObj);
+    await dbObj.save();
+  }
   return thisBlock;
 }
@@ -139,8 +143,10 @@ if (!CONF_SIMPLE_MODE) {
         blockTime: thisBlock.timestamp
       }
       if (!isSyncing) {
-        const dbObj = new Event(eventObj);
-        await dbObj.save();
+        if (!CONF_DISABLE_DB) {
+          const dbObj = new Event(eventObj);
+          await dbObj.save();
+        }
         eventsCache.push(eventObj);
       } else {
         syncCache.push(eventObj);
@@ -179,8 +185,10 @@ const doSync = function () {
       blockNumber: thisBlock.number,
       blockTime: thisBlock.timestamp
     }
-    const dbObj = new Event(eventObj);
-    await dbObj.save();
+    if (!CONF_DISABLE_DB) {
+      const dbObj = new Event(eventObj);
+      await dbObj.save();
+    }
     eventsCache.push(eventObj);
   }
 }
@@ -238,15 +246,17 @@ const handleSync = async function () {
     syncCache = [];
     for (const eventObj of liveEvents) {
       console.log("Parsing event received while syncing");
-      const dbObj = new Event(eventObj);
-      await dbObj.save();
+      if (!CONF_DISABLE_DB) {
+        const dbObj = new Event(eventObj);
+        await dbObj.save();
+      }
       eventsCache.push(eventObj);
     }
   }
   console.log('done syncing')
   isSyncing = false;
 };
-if (!isSyncRunning && !CONF_SIMPLE_MODE) {
+if (!isSyncRunning && !CONF_SIMPLE_MODE && !CONF_DISABLE_SYNC) {
   handleSync();
 }

View File

@ -6,7 +6,8 @@ import connectStore from "connect-mongo";
import { userRouter, sessionRouter, livepeerRouter } from './routes/index'; import { userRouter, sessionRouter, livepeerRouter } from './routes/index';
import { import {
NODE_PORT, NODE_ENV, MONGO_URI, SESS_NAME, SESS_SECRET, NODE_PORT, NODE_ENV, MONGO_URI, SESS_NAME, SESS_SECRET,
SESS_LIFETIME , MONGO_URI_DEV, MONGO_URI_LOCAL, CONF_SIMPLE_MODE SESS_LIFETIME , MONGO_URI_DEV, MONGO_URI_LOCAL, CONF_SIMPLE_MODE,
CONF_DISABLE_DB
} from "./config"; } from "./config";
// Env variable which determines which DB to connect to // Env variable which determines which DB to connect to
const { NODE_ENV: mode } = process.env; const { NODE_ENV: mode } = process.env;
@@ -14,7 +15,7 @@ const { NODE_ENV: mode } = process.env;
 (async () => {
   try {
     // Make DB connection if needed
-    if (!CONF_SIMPLE_MODE){
+    if (!CONF_SIMPLE_MODE && !CONF_DISABLE_DB){
       if (mode == "production"){
         await mongoose.connect(MONGO_URI, { useNewUrlParser: true, useFindAndModify: false});
       }else if (mode == "development"){
@@ -26,7 +27,7 @@ const { NODE_ENV: mode } = process.env;
       }
       console.log('MongoDB connected on ' + mode);
     }else{
-      console.log('Running in basic mode' );
+      console.log('Running without a database connection' );
     }
     // Web application framework
     const app = express();
@@ -36,7 +37,7 @@ const { NODE_ENV: mode } = process.env;
     app.use(express.json());
     let MongoStore;
-    if (!CONF_SIMPLE_MODE){
+    if (!CONF_SIMPLE_MODE && !CONF_DISABLE_DB){
       // Import session module
       MongoStore = connectStore(session);
       // Declare session data