Comment on page
GraphQL Client
To reach our API, we use the Google Apps Script extension available in Google Sheets to fetch and structure data from the endpoint. Google Apps Script is a scripting platform, similar to VBA, that benefits from integration with the full range of Google products (Sheets, Docs, Cloud, Compute Engine, etc.).
If we are calling a simple query, we can simply use our client
getApiTimeTravelData
to fetch the data and write it on a Sheet. For time travel queries, we need to construct a query using buildQueryaccountCTokenstt
with a list of timestamps. Simple Query
Time Travel Query
/**
 * Main entry point for simple (non-time-travel) queries.
 * Fetches every query listed below from the production API and writes
 * each result to the sheet named after the query.
 */
function fetchInverse() {
  const queryNames = [
    'accountCTokens',
  ]
  const api = 'prod'
  for (const name of queryNames) {
    const query = getQuery(name)
    const queryUrl = getApiLink(api)
    const data = getApiTimeTravelData(queryUrl, [query])
    writeData(data, name)
  }
}
The following query fetches a snapshot of the market entity every day from a specific timestamp.
/**
 * Main entry point for the accountCTokens time-travel query.
 * Builds one sub-query per weekly snapshot since the start date,
 * batches them, fetches every batch and writes the result to the
 * 'accountCTokens_tt' sheet.
 */
function fetch_accountCTokens_tt() {
  const startDate = 1615161600       // Monday, March 8, 2021 0:00:00 GMT
  const frequency = 3600 * 24 * 7    // weekly frequency for account, high payload
  const batchSize = 100000           // lower this number if the payload is too large

  // Fetch blocks from timestamps and build the time-travel sub-queries
  // (array of queries alone, without the surrounding {}).
  const timeTravelQueries = buildQueryaccountCTokenstt(startDate, frequency)
  // Bulk the sub-queries into n payloads depending on batch size
  // (array of queries wrapped with {}).
  const batches = splitTimeTravelQuery(timeTravelQueries, batchSize)
  // Fetch the bulked queries; result is an array [[header][data]].
  const data = getApiTimeTravelData(getApiLink('prod'), batches)
  // Write header and data; header must stay first, data second.
  writeData(data, 'accountCTokens_tt')
}
Our Graph Client
getApiTimeTravelData
can be used to fetch both simple and time travel queries. In the case of a time travel query, it will use two helpers :
getBlockFromTimestamp
, to retrieve a list of blocks from the list of timestamps on a special subgraph dedicated to indexing block metadata. This list of blocks is then used to build the time travel query with the proper blocks as parameters; and splitTimeTravelQuery
to split the query payload in smaller more manageable payloads in order to not overload the endpoint
getApiTimeTravelData
getBlockFromTimestamp
splitTimeTravelQuery
/** Main Client to fetch data :
* Input automatically skip_param to fetch queries iteratively with more than 1000 results
* If skip > 5000 then will proceed searching with IDs
*/
function getApiTimeTravelData(url_query, queries) {
var final_data = []
for (query in queries) {
var totalData = []
var has_skipped = false
var areData = true
var skip = 0
while (areData) {
/** Condition to format skip param due to API limitation to 5000 skips
* If more than 5000 skips then we switch to id_gt as condition to
* fetch data faster
*/
if (skip == 6000) {
var skip = 0;
has_skipped = true
var queryIter = queries[query].replaceAll("skip_param", skip.toString()).replaceAll('first', 'where: {id_gt:"' + lastId + '"}, first');
}
else if (has_skipped == true) {
skip = 0
var queryIter = queries[query].replaceAll("skip_param", skip.toString()).replaceAll('first', 'where: {id_gt:"' + lastId + '"}, first');
}
else {
var queryIter = queries[query].replaceAll("skip_param", skip.toString());
}
var data = getApiData(url_query, queryIter);
//Map all sub-tables fetched by querymarket
for (var i in data['data']) {
var arrayData = data['data'][i]
var mapping = ''
if (arrayData.length > 0) {
var keys = FetchKeysLevel(arrayData[0])
var columns = keys[1]
for (var key in columns) {
mapping = (mapping + "," + columns[key])
}
var headers = keys[0]
}
// Condition to map correctly time travel queries (based on block)
// and non-timetravel queries
if (queryIter.includes('block:')) {
mapping = ("[" +i.replace('t', '')+","+ mapping.substring(1) + "]")}
else{
mapping = ('[' + mapping.substring(1) + ']')
}
var grouped = arrayData.map(function (e) { return eval(mapping) })
totalData.push(...grouped)
}
skip += 1000
areData = (arrayData.length > 0)
//if (typeof arrayData == 'undefined'){areData = false}else{ areData = (arrayData.length > 0)}
if (areData) {
var headers = []
if (queryIter.includes('block:')) { headers.push('Timestamp') }
var keys = FetchKeysLevel(arrayData[0])[0]
for (let header in keys) { headers.push(keys[header]) }
var lastId = arrayData[arrayData.length - 1]['id']
}
}
final_data.push(...totalData)
}
return [headers, final_data]
}
/** Builds the list of observation timestamps and resolves each one to a
 * block number via the block-indexing subgraph.
 *
 * @param {number} timestamp_start - Unix timestamp (seconds) of the first period.
 * @param {number} delta_per_period - Seconds between two snapshots.
 * @return {Object} Map from alias 't<timestamp>' to the matching block query result.
 */
function getBlockFromTimestamp(timestamp_start, delta_per_period) {
  var period = 1
  var timestamps = []
  // Stay ~12 minutes behind "now" so the block indexer has caught up.
  var maxTimeStamp = Math.round(new Date().getTime() / 1000) - 720
  var url_query_block = getApiLink('block')
  // Generate the list of timestamps based on the parameters provided.
  while ((timestamp_start + period * delta_per_period) < maxTimeStamp) {
    timestamps = [...timestamps, (timestamp_start + period * delta_per_period)]
    period += 1
  }
  // Add the latest observation point to the list.
  timestamps.push(maxTimeStamp)
  var queryblock = []
  for (let i = 0; i < timestamps.length; i++) {
    // 'blockTimeStamp_param1' appears twice (alias prefix and timestamp_gte),
    // hence the two replace() calls on the next line.
    queryblock.push('tblockTimeStamp_param1:blocks(first: 1,orderBy: number,orderDirection: asc,where: {timestamp_gte: blockTimeStamp_param1,timestamp_lt: blockTimeStamp_param2}){number}')
    queryblock[i] = queryblock[i].replace('blockTimeStamp_param1', timestamps[i]).replace('blockTimeStamp_param1', timestamps[i])
    // Accept any block mined within 10 minutes after the target timestamp.
    queryblock[i] = queryblock[i].replace('blockTimeStamp_param2', timestamps[i] + 600)
  }
  var queries = splitTimeTravelQuery(queryblock, 50000)
  var all_blocks = {}
  for (let j = 0; j < queries.length; j++) {
    var data = getApiData(url_query_block, queries[j])
    // Bug fix: 'k' was an implicit global (no var/let), which leaks state
    // between calls and throws under strict mode.
    for (var k in data['data']) {
      all_blocks[k] = data['data'][k]
    }
  }
  console.log('Block data retrieved')
  return all_blocks
}
/**
 * Splits an array of GraphQL sub-query strings into batched payloads,
 * each wrapped in '{...}' and capped at roughly max_size_per_batch
 * characters (sized from the first sub-query's length — assumes
 * sub-queries are all about the same size; TODO confirm).
 *
 * @param {string[]} query - Array of sub-query strings.
 * @param {number} max_size_per_batch - Maximum characters per batch payload.
 * @return {string[]} Batched query payloads, each wrapped in braces.
 */
function splitTimeTravelQuery(query, max_size_per_batch) {
  var queries = []
  var subquery_length = query[0].length
  var query_length = query.length * subquery_length
  var batch_number = Math.ceil(query_length / max_size_per_batch)
  var subqueries_per_batch = Math.min(Math.floor(max_size_per_batch / subquery_length), query.length)
  for (var i = 0; i < batch_number; i++) {
    var query_string = '{'
    for (var j = 0; j < subqueries_per_batch; j++) {
      var index = i * subqueries_per_batch + j
      // Bug fix: the original indexed past the end of the array on the
      // last (partial) batch, appended the literal string 'undefined',
      // and then stripped every occurrence of 'undefined' with
      // replaceAll — which would also corrupt a query legitimately
      // containing that text. Bounds-check instead.
      if (index >= query.length) break
      query_string += query[index]
    }
    query_string += '}'
    queries.push(query_string)
  }
  return queries
}
Queries are stored in a separate file and function to keep our script tidy, then called from the main script. Since our client automatically inputs the skip and first parameters based on the query result size, we need to make them amendable by the script.
Example with
accountCTokens
entity :Simple Query
Time Travel Query
if (name == 'accountCTokens') {
var queryString = '{\
query:accountCTokens(first:1000, skip: skip_param, orderBy: id, orderDirection: asc){\
id\
account {\
id\
}\
market {\
id\
exchangeRate\
underlyingSymbol\
underlyingPrice\
borrowIndex\
}\
accountBorrowIndex\
accrualBlockNumber\
enteredMarket\
cTokenBalance\
storedBorrowBalance\
symbol\
totalUnderlyingBorrowed\
totalUnderlyingRedeemed\
totalUnderlyingRepaid\
totalUnderlyingSupplied\
}\
/**
 * Builds the accountCTokens time-travel sub-queries from the list of
 * blocks obtained via getBlockFromTimestamp.
 *
 * @param {number} timestamp_start - Unix timestamp (seconds) of the first period.
 * @param {number} delta_per_period - Seconds between two snapshots.
 * @return {string[]} One aliased sub-query per snapshot, each containing
 *   the 'skip_param' placeholder for the pagination client.
 */
function buildQueryaccountCTokenstt(timestamp_start, delta_per_period) {
  var timestamps = getBlockFromTimestamp(timestamp_start, delta_per_period);
  var query_array = []
  // Bug fix: the selection set was a backslash-continued string literal
  // whose continuation lines carried no leading whitespace, so field
  // names were concatenated with no separators ('{idaccount {id}...'),
  // producing an invalid GraphQL selection set. Build it with explicit
  // single-space separators instead.
  var selection = '{' +
    ' id' +
    ' account { id }' +
    ' market { id exchangeRate underlyingSymbol underlyingPrice borrowIndex }' +
    ' accountBorrowIndex' +
    ' accrualBlockNumber' +
    ' enteredMarket' +
    ' cTokenBalance' +
    ' storedBorrowBalance' +
    ' symbol' +
    ' totalUnderlyingBorrowed' +
    ' totalUnderlyingRedeemed' +
    ' totalUnderlyingRepaid' +
    ' totalUnderlyingSupplied' +
    ' }'
  for (var i in timestamps) {
    var query_market = 'blocktimestamp_param:accountCTokens(first:1000, skip:skip_param, block: {number: block_param})' + selection
    // 'i' is the alias 't<timestamp>'; timestamps[i] is the block result
    // array, whose first entry carries the block number.
    query_market = query_market.replace('block_param', timestamps[i][0]['number'])
    query_market = query_market.replace('blocktimestamp_param', i)
    query_array.push(query_market)
  }
  return query_array
}
Others
We also use a custom helper
FetchKeysLevel
to retrieve the headers name in the output data. Finally, the data is simply written into a Google Sheet with its headers using writeData
. This last function can easily be amended to write into any other database infrastructure connected to the web.FetchKeysLevel
writeData
/** Fetches column names from a sample result object and returns two arrays:
 * 1) header names ('level1', or 'level1.level2' for nested objects),
 * 2) accessor expressions e['level1'] / e['level1']['level2'] used to
 *    build the eval-based mapping in getApiTimeTravelData.
 *
 * @param {Object} obj - A sample row object from the API response.
 * @return {Array[]} [objHeaders, objKeys]
 */
function FetchKeysLevel(obj) {
  var objKeys = [];
  var objHeaders = [];
  for (var [key, value] of Object.entries(obj)) {
    // Bug fix: typeof null === 'object', so null-valued fields iterated
    // zero sub-keys and were silently dropped from the output; treat
    // null as a scalar instead.
    if (value !== null && typeof value === "object") {
      for (var sub in value) {
        objKeys.push("e['" + key + "']['" + sub + "']");
        objHeaders.push(key + "." + sub);
      }
    } else {
      objKeys.push("e['" + key + "']");
      objHeaders.push(key);
    }
  }
  return [objHeaders, objKeys]
}
/** Writes [header, data] to the named sheet, clearing it first:
 * header on row 1, data from row 2 down.
 *
 * @param {Array[]} data - Two-element array: [headerRow, dataRows].
 * @param {string} sheetname - Name of the target sheet.
 */
function writeData(data, sheetname) {
  const header = data[0];
  const rows = data[1];
  Logger.log('Writing ' + (rows.length + ',' + rows[0].length) + ' data on sheet ' + sheetname)
  Logger.log(rows.length + ',' + rows[0].length)
  const sheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName(sheetname);
  sheet.clear()
  sheet.getRange(1, 1, 1, header.length).setValues([header]);
  sheet.getRange(2, 1, rows.length, rows[0].length).setValues(rows);
}
Last modified 1yr ago