Answersheet to 'Events: handling and receiving events'
// node/index.ts
import {
  LRUCache,
  Service,
  ServiceContext,
  ParamsContext,
  RecorderState,
  method,
} from '@vtex/api'

import { Clients } from './clients'
import { analytics } from './handler/analytics'
import { updateLiveUsers } from './event/liveUsersUpdate'
// Create an LRU memory cache for the Status client.
// The @vtex/api HttpClient respects Cache-Control headers and uses the provided cache.
// (See the sketch after this file for how the cache could be passed to a specific client.)
const memoryCache = new LRUCache<string, any>({ max: 5000 })

// `metrics` is a global provided by @vtex/api; trackCache exposes hit-rate metrics for this cache.
metrics.trackCache('status', memoryCache)

const THREE_SECONDS_MS = 3 * 1000
const CONCURRENCY = 10
declare global {
  type Context = ServiceContext<Clients, State>

  interface State extends RecorderState {
    code: number
  }
}
export default new Service<Clients, State, ParamsContext>({
  clients: {
    implementation: Clients,
    options: {
      default: {
        retries: 2,
        timeout: 10000,
      },
      events: {
        exponentialTimeoutCoefficient: 2,
        exponentialBackoffCoefficient: 2,
        initialBackoffDelay: 50,
        retries: 1,
        timeout: THREE_SECONDS_MS,
        concurrency: CONCURRENCY,
      },
    },
  },
  routes: {
    analytics: method({
      GET: [analytics],
    }),
  },
  events: {
    liveUsersUpdate: updateLiveUsers,
  },
})
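
Note that `memoryCache` is created and tracked above but never handed to a client. A minimal sketch of how it could be wired in, assuming the `Clients` class from earlier steps of this course exposes a `status` client and that `ClientsConfig` is added to the @vtex/api imports (this excerpt is an illustration, not part of the required answer):

// Hypothetical: the same clients configuration with the cache passed to one client.
// `status` is assumed to be a getter name on the Clients class; InstanceOptions
// accepts a memoryCache layer, which the HttpClient then uses for cacheable responses.
const clients: ClientsConfig<Clients> = {
  implementation: Clients,
  options: {
    default: { retries: 2, timeout: 10000 },
    status: { memoryCache },
  },
}

This `clients` constant would then replace the inline `clients` object in the `Service` call above.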
// node/event/liveUsersUpdate.ts
// Minimal event handler: it only logs, so you can confirm the event was received.
export async function updateLiveUsers() {
  console.log('EVENT HANDLER: received event')
}
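
The handler above is intentionally minimal. A slightly fuller sketch, assuming the `EventContext` type exported by @vtex/api and the course's `Clients` class, could type the context and inspect the payload (whose shape depends entirely on what the sender emits):

// node/event/liveUsersUpdate.ts - a hedged sketch, not the required answer
import { EventContext } from '@vtex/api'

import { Clients } from '../clients'

export async function updateLiveUsers(ctx: EventContext<Clients>) {
  // The payload sent by the emitting app, if any, arrives in ctx.body.
  console.log('EVENT HANDLER: received event', ctx.body)
}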
// node/service.json
{
  "memory": 128,
  "ttl": 10,
  "timeout": 10,
  "minReplicas": 2,
  "maxReplicas": 10,
  "workers": 4,
  "events": {
    "liveUsersUpdate": {
      "sender": "vtex.events-example",
      "keys": ["send-event"]
    }
  },
  "routes": {
    "analytics": {
      "path": "/_v/app/analytics/realTime",
      "public": true
    }
  }
}
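
The `events` block above subscribes this app to events emitted by vtex.events-example under the key send-event and routes them to the liveUsersUpdate handler registered in index.ts. For context, a sender app could fire such an event through the Events client that @vtex/api ships by default. A hedged sketch (the empty subject and the payload are assumptions, and sendEvent's exact signature may vary between @vtex/api versions):

// Hypothetical excerpt inside a handler of the sender app (vtex.events-example).
// The second argument must match one of the "keys" that subscribing apps declare.
await ctx.clients.events.sendEvent('', 'send-event', {
  date: new Date().toISOString(),
})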