Browse Source

add site counter

main
matthew 2 years ago
parent
commit
d4d47e665f
  1. 25
      searching-front/package-lock.json
  2. 3
      searching-front/package.json
  3. 125
      searching-front/services/domain-watcher.ts
  4. 14
      searching-front/services/influx.ts
  5. 29
      searching-front/services/main.ts
  6. 5
      searching-front/services/modules/influxdb/constants.ts
  7. 46
      searching-front/services/modules/influxdb/helpers.ts
  8. 52
      searching-front/services/modules/influxdb/index.ts
  9. 13
      searching-front/services/modules/influxdb/types.ts
  10. 2
      searching-front/services/parser.ts

25
searching-front/package-lock.json generated

@ -13,6 +13,8 @@
"@blitzjs/rpc": "2.0.0-beta.3", "@blitzjs/rpc": "2.0.0-beta.3",
"@elastic/elasticsearch": "8.2.1", "@elastic/elasticsearch": "8.2.1",
"@hookform/resolvers": "2.9.7", "@hookform/resolvers": "2.9.7",
"@influxdata/influxdb-client": "1.31.0",
"@influxdata/influxdb-client-apis": "1.31.0",
"@prisma/client": "4.2.1", "@prisma/client": "4.2.1",
"@types/html-to-text": "8.1.1", "@types/html-to-text": "8.1.1",
"@types/textversionjs": "1.1.1", "@types/textversionjs": "1.1.1",
@ -2198,6 +2200,19 @@
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
"dev": true "dev": true
}, },
"node_modules/@influxdata/influxdb-client": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.31.0.tgz",
"integrity": "sha512-8DVT3ZB/VeCK5Nn+BxhgMrAMSTseQAEgV20AK+ZMO5Fcup9XWsA9L2zE+3eBFl0Y+lF3UeKiASkiKMQvws35GA=="
},
"node_modules/@influxdata/influxdb-client-apis": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.31.0.tgz",
"integrity": "sha512-6ALGNLxtfffhICobOdj13Z6vj6gdQVOzVXPoPNd+w7V60zrbGhTqzXHV1KMZ/lzOb6YkRTRODbxz4W/b/7N5hg==",
"peerDependencies": {
"@influxdata/influxdb-client": "*"
}
},
"node_modules/@jridgewell/gen-mapping": { "node_modules/@jridgewell/gen-mapping": {
"version": "0.3.2", "version": "0.3.2",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
@ -18167,6 +18182,16 @@
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
"dev": true "dev": true
}, },
"@influxdata/influxdb-client": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client/-/influxdb-client-1.31.0.tgz",
"integrity": "sha512-8DVT3ZB/VeCK5Nn+BxhgMrAMSTseQAEgV20AK+ZMO5Fcup9XWsA9L2zE+3eBFl0Y+lF3UeKiASkiKMQvws35GA=="
},
"@influxdata/influxdb-client-apis": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/@influxdata/influxdb-client-apis/-/influxdb-client-apis-1.31.0.tgz",
"integrity": "sha512-6ALGNLxtfffhICobOdj13Z6vj6gdQVOzVXPoPNd+w7V60zrbGhTqzXHV1KMZ/lzOb6YkRTRODbxz4W/b/7N5hg=="
},
"@jridgewell/gen-mapping": { "@jridgewell/gen-mapping": {
"version": "0.3.2", "version": "0.3.2",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",

3
searching-front/package.json

@ -3,6 +3,7 @@
"version": "1.0.0", "version": "1.0.0",
"scripts": { "scripts": {
"watcher": "ts-node-esm ./services/main.ts", "watcher": "ts-node-esm ./services/main.ts",
"influx": "ts-node-esm ./services/influx.ts",
"parser": "ts-node-esm ./services/parser.ts", "parser": "ts-node-esm ./services/parser.ts",
"dev": "blitz dev", "dev": "blitz dev",
"build": "blitz build", "build": "blitz build",
@ -30,6 +31,8 @@
"@blitzjs/rpc": "2.0.0-beta.3", "@blitzjs/rpc": "2.0.0-beta.3",
"@elastic/elasticsearch": "8.2.1", "@elastic/elasticsearch": "8.2.1",
"@hookform/resolvers": "2.9.7", "@hookform/resolvers": "2.9.7",
"@influxdata/influxdb-client": "1.31.0",
"@influxdata/influxdb-client-apis": "1.31.0",
"@prisma/client": "4.2.1", "@prisma/client": "4.2.1",
"@types/html-to-text": "8.1.1", "@types/html-to-text": "8.1.1",
"@types/textversionjs": "1.1.1", "@types/textversionjs": "1.1.1",

125
searching-front/services/domain-watcher.ts

@ -1,4 +1,3 @@
import tonweb from "tonweb" import tonweb from "tonweb"
import { import {
JettonApi, JettonApi,
@ -14,83 +13,93 @@ import db from "../db/index"
import axios from "axios" import axios from "axios"
import { getTonProxy } from "./helpers" import { getTonProxy } from "./helpers"
interface SearchNFTItemsParams { interface SearchNFTItemsParams {
limit: number limit: number
offset: number offset: number
} }
const wait = (time:number) => new Promise((resolve)=>setTimeout(()=>resolve(true),time )) const getFullUrl = (dmn: string) => `http://${dmn}`
const upsertDmn = async (dmn: string, available: boolean) =>
const searchNFTItems = async ({ limit, offset }: SearchNFTItemsParams) => { await db.nftDomain.upsert({
try{ where: {
address: getFullUrl(dmn),
},
update: { available, address: getFullUrl(dmn) },
create: { available, address: getFullUrl(dmn) },
})
const wait = (time: number) => new Promise((resolve) => setTimeout(() => resolve(true), time))
console.log(`Start search limit:${limit}, offset:${offset}`) const searchNFTItems = async ({ limit, offset }: SearchNFTItemsParams) => {
await wait(1000) try {
const { data } = await axios.get( console.log(`Start search limit:${limit}, offset:${offset}`)
`https://tonapi.io/v1/nft/searchItems?collection=EQC3dNlesgVD8YbAazcauIrXBPfiVhMMr5YYk2in0Mtsz0Bz&include_on_sale=false&limit=${limit}&offset=${offset}`, await wait(1000)
{ const { data } = await axios.get(
headers:{ `https://tonapi.io/v1/nft/searchItems?collection=EQC3dNlesgVD8YbAazcauIrXBPfiVhMMr5YYk2in0Mtsz0Bz&include_on_sale=false&limit=${limit}&offset=${offset}`,
// 'Authorization': 'Bearer '+ '6c456b1e31217a79e121dcb9b506c280358d58bc86659bdbac1d737bfc3691fb', {
headers: {
// 'Authorization': 'Bearer '+ '6c456b1e31217a79e121dcb9b506c280358d58bc86659bdbac1d737bfc3691fb',
},
} }
} )
) return data.nft_items
return data.nft_items } catch (e) {
} catch (e){ return searchNFTItems({ limit, offset })
return searchNFTItems({ limit, offset }) }
}
} }
const portion = 1000 const portion = 1000
const fetchTonSite = async (url: string) => { const fetchTonSite = async (url: string) => {
const urlToFetch = `http://${url}/` try {
const response = await axios.get(urlToFetch, { const urlToFetch = `http://${url}/`
proxy: getTonProxy(), const response = await axios.get(urlToFetch, {
}) proxy: getTonProxy(),
if (!response.data) { })
console.log("Error fetch") if (!response.data) {
throw "error" console.log("Error fetch")
throw "err"
}
return url
} catch (e) {
throw url
} }
return url
} }
const main = async () => new Promise(async (resolve)=>{ const main = async () =>
// Receive typed array of owner nfts new Promise(async (resolve) => {
let count = 0 // Receive typed array of owner nfts
while (true) { let count = 0
// в nftItems 1000 сайтов while (true) {
const nftItems = await searchNFTItems({ // в nftItems 1000 сайтов
limit: portion, const nftItems = await searchNFTItems({
offset: count * portion, limit: portion,
}) offset: count * portion,
})
if (nftItems.length) { if (nftItems.length) {
for (let i = 0; i < nftItems.length; i++) { for (let i = 0; i < nftItems.length; i++) {
const nftDomainItem = nftItems[i] const nftDomainItem = nftItems[i]
if (nftDomainItem.dns) { if (nftDomainItem.dns) {
fetchTonSite(nftDomainItem.dns) fetchTonSite(nftDomainItem.dns)
.then(async (dmn) => { .then(async (dmn) => {
console.log("success dmn", dmn) console.log("success dmn", dmn)
await db.nftDomain.upsert({ upsertDmn(dmn, true)
where: { })
address: `http://${dmn}`, .catch((dmn) => {
}, upsertDmn(dmn,false)
update: { available: false, address: `http://${dmn}` },
create: { available: false, address: `http://${dmn}` },
}) })
}) }
.catch(() => {})
} }
count++
continue
} }
count++ break
continue
} }
break console.log("Finish fetch nft")
} setTimeout(() => {
console.log('Finish fetch nft') resolve(true)
setTimeout(()=>{resolve(true)}, 10000) }, 10000)
}) })
export default main export default main

14
searching-front/services/influx.ts

@@ -0,0 +1,14 @@
import influxdb from "./modules/influxdb"
import db from "../db/index"

/**
 * Reports site-count metrics to InfluxDB: the total number of stored NFT
 * domains and the number currently marked available.
 */
const main = async () => {
  // The two counts are independent queries, so issue them in parallel
  // instead of awaiting them one after the other.
  const [allDomainsCount, availableDomainsCount] = await Promise.all([
    db.nftDomain.count(),
    db.nftDomain.count({ where: { available: true } }),
  ])

  influxdb.writeSitesCount({
    all: allDomainsCount,
    available: availableDomainsCount,
  })
}

export default main

29
searching-front/services/main.ts

@ -1,31 +1,34 @@
import dotenv from "dotenv" import dotenv from "dotenv"
import path from "path" import path from "path"
dotenv.config({ path: path.resolve(__dirname, "../.env.local") }) dotenv.config({ path: path.resolve(__dirname, "../.env.local") })
import domainWatcher from './domain-watcher' import domainWatcher from "./domain-watcher"
import parser from './parser' import parser from "./parser"
import influx from "./influx"
const run = async()=>{ const run = async()=>{
console.log('Start domain watcher') console.log('Start domain watcher')
console.time('watcher') console.time('watcher')
await domainWatcher(); await domainWatcher();
console.timeEnd('watcher') console.timeEnd('watcher')
influx()
console.log('Start parser'); console.log('Start parser');
console.time('watcher'); console.time('watcher');
await parser(); await parser();
console.timeEnd('watcher'); console.timeEnd('watcher');
} }
const second = 1000; const second = 1000
const minute= 60 * second; const minute = 60 * second
const hour = 60 * minute; const hour = 60 * minute
run(); run()
setInterval(()=>{ setInterval(() => {
console.log(new Date(), 'Health check'); console.log(new Date(), "Health check")
},hour) }, hour)
setInterval(()=>{ setInterval(() => {
console.log(new Date(), 'Cron parse start') console.log(new Date(), "Cron parse start")
run() run()
},3 * hour) }, 3 * hour)

5
searching-front/services/modules/influxdb/constants.ts

@@ -0,0 +1,5 @@
// Connection credentials are injected via environment variables.
// NOTE(review): the `as string` casts assume these vars are always set —
// they will be `undefined` at runtime if the env file is missing; confirm
// the deployment always provides them.
export const influxToken = process.env.INFLUX_TOKEN as string
export const influxOrg = process.env.INFLUX_ORG as string
export const influxBucket = process.env.INFLUX_BUCKET as string

// Measurement ("point") name used for every sample written by this service.
export const influxPointName = 'count'
// Value of the `host` tag attached to all written points.
export const influxHost = 'Searching'

46
searching-front/services/modules/influxdb/helpers.ts

@@ -0,0 +1,46 @@
import { fluxDuration } from "@influxdata/influxdb-client";
import { influxBucket, influxHost, influxPointName } from "./constants";
import { InfluxField, InfluxPeriod } from "./types";
/**
 * Builds a Flux query returning the windowed mean of `field` over the
 * requested look-back period.
 *
 * @param field       measurement field to read (matched against `_field`)
 * @param fetchPeriod how far back to look; also fixes the aggregation window
 * @returns a Flux query string for the query API
 */
export const influxQuery = (field: InfluxField, fetchPeriod: InfluxPeriod) => {
  // Range start (relative to now) and aggregateWindow size per period.
  // The original `switch` had no `break`s, so every matching case fell
  // through to the last branch; a lookup table removes that bug class and
  // lets the compiler prove every InfluxPeriod is handled.
  const windows: Record<InfluxPeriod, { start: string; period: string }> = {
    [InfluxPeriod.H]: { start: "-1h", period: "6m" },
    [InfluxPeriod.D]: { start: "-1d", period: "144m" },
    [InfluxPeriod.W]: { start: "-1w", period: "1008m" },
    [InfluxPeriod.M]: { start: "-1mo", period: "3d" },
    [InfluxPeriod.Y]: { start: "-1y", period: "36d" },
    // was "10m" — a positive start points into the future; a look-back
    // range start must be negative
    [InfluxPeriod.tenminute]: { start: "-10m", period: "1m" },
  }
  const { start, period } = windows[fetchPeriod]

  // fluxDuration validates the duration literals and yields values that are
  // safe to interpolate (the original computed these but never used them).
  const influxStart = fluxDuration(start)
  const influxPeriod = fluxDuration(period)

  return `from(bucket: "${influxBucket}")
  |> range(start: ${influxStart}, stop: now())
  |> filter(fn: (r) => r["host"] == "${influxHost}")
  |> filter(fn: (r) => r["_field"] == "${field}")
  |> filter(fn: (r) => r["_measurement"] == "${influxPointName}")
  |> aggregateWindow(every: ${influxPeriod}, fn: mean, createEmpty: false)
  |> yield(name: "mean")`
}
/** Minimal shape of a row returned by the InfluxDB query API. */
interface InfluxRow {
  _value?: unknown
  _time?: unknown
}

/**
 * Maps raw Influx query rows to a compact `{ value, time }` shape.
 * The parameter stays `unknown[]` at the boundary; each row is narrowed
 * explicitly instead of accessing properties on `unknown` directly,
 * which is a compile error under `strict` in the original.
 */
export const processInfluxResult = (res: unknown[]) =>
  res.map((row) => {
    const { _value, _time } = row as InfluxRow
    return { value: _value, time: _time }
  })

52
searching-front/services/modules/influxdb/index.ts

@@ -0,0 +1,52 @@
import { fluxDuration, InfluxDB } from "@influxdata/influxdb-client"
import { Point } from "@influxdata/influxdb-client"
import { influxBucket, influxHost, influxOrg, influxPointName, influxToken } from "./constants"
import { influxQuery, processInfluxResult } from "./helpers";
import { InfluxField, InfluxPeriod } from "./types"
interface WriteSitesCountParams {
  all: number // total number of tracked domains
  available: number // domains that responded to the last fetch
}

interface QueryParams {
  field: InfluxField
  period: InfluxPeriod
}

/**
 * Thin wrapper around the InfluxDB client: writes site-count samples and
 * reads them back aggregated over a period.
 */
class InfluxDb {
  private readonly client: InfluxDB

  constructor() {
    this.client = new InfluxDB({ url: process.env.INFLUX_URL as string, token: influxToken })
  }

  /** Write API pre-tagged with this service's host tag. */
  getWriteApi() {
    const writeApi = this.client.getWriteApi(influxOrg, influxBucket)
    writeApi.useDefaultTags({ host: influxHost })
    return writeApi
  }

  /**
   * Writes one sample of the total / available site counts.
   * Returns the flush promise from `close()` so callers can await
   * delivery — the original dropped it, risking lost points when the
   * process exits right after writing.
   */
  writeSitesCount({ all, available }: WriteSitesCountParams) {
    const writeApi = this.getWriteApi()
    const pointAll = new Point(influxPointName).intField(InfluxField.ALL_SITES, all)
    const pointAvailable = new Point(influxPointName).intField(InfluxField.AVAILABLE_SITES, available)
    writeApi.writePoint(pointAll)
    writeApi.writePoint(pointAvailable)
    return writeApi.close()
  }

  /**
   * Runs the aggregate query for a field/period and returns the mapped
   * rows (the original only logged them and returned undefined).
   */
  async query({ field, period }: QueryParams) {
    const queryApi = this.client.getQueryApi(influxOrg)
    const query = influxQuery(field, period)
    const result = await queryApi.collectRows(query)
    const processed = processInfluxResult(result as unknown[])
    console.log(processed) // kept from original for operational visibility
    return processed
  }

  /** Convenience wrapper: total-site counts over `period`. */
  getAllSiteCounts(period: InfluxPeriod) {
    return this.query({
      field: InfluxField.ALL_SITES,
      period,
    })
  }
}

export default new InfluxDb()

13
searching-front/services/modules/influxdb/types.ts

@@ -0,0 +1,13 @@
// Time windows supported by the Influx aggregate queries. Each value maps
// to a (range start, aggregateWindow size) pair in
// services/modules/influxdb/helpers.ts.
export enum InfluxPeriod {
  H='H', // last hour
  D='D', // last day
  W='W', // last week
  M='M', // last month
  Y='Y', // last year
  // last 10 minutes; lowercase member name kept — it is part of the
  // public enum interface
  tenminute='tenminute',
}

// Field names written to the count measurement (see index.ts writeSitesCount).
export enum InfluxField {
  ALL_SITES = 'ALL_SITES', // total domains known to the watcher
  AVAILABLE_SITES = 'AVAILABLE_SITES' // domains that answered a fetch
}

2
searching-front/services/parser.ts

@ -41,7 +41,7 @@ const indexWebsite = async (domain: string, path: string, subpages: SubPages = {
const main = async () => { const main = async () => {
await Elastic.initElastic() await Elastic.initElastic()
const domains = await db.nftDomain.findMany() const domains = await db.nftDomain.findMany({where:{available: true}})
console.log('Find domains', domains) console.log('Find domains', domains)
if (domains) { if (domains) {
for (const domain of domains) { for (const domain of domains) {

Loading…
Cancel
Save