fix: event listener (#2119)
* fix: commit the transaction after taking an event
* feat: allow the event listener to reconnect to Postgres
* chore: log the number of sync events pending processing as a metric
* fix: make the dead_letter runner able to process events without needing a lock on the event
* chore: close the Session after reconnecting
* refactor: make EventSource emit only events that can be processed
This commit is contained in:
parent 450322fff1
commit 6862ed3602

4 changed files with 59 additions and 25 deletions
@@ -3699,7 +3699,10 @@ class SyncEvent(Base, ModelMixin):
               AND taken_time IS NULL
         """
         args = {"taken_time": arrow.now().datetime, "sync_event_id": self.id}
+
         res = Session.execute(sql, args)
+        Session.commit()
+
         return res.rowcount > 0

     @classmethod
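The hunk above is the tail of `SyncEvent.mark_as_taken`: the `UPDATE ... WHERE taken_time IS NULL` can only affect a row nobody has claimed yet, `rowcount > 0` tells the caller whether this worker won the row, and the newly added `Session.commit()` makes the claim visible to other runners immediately instead of leaving it pending in an open transaction. A minimal sketch of the same claim pattern with plain psycopg2 (the DSN, table name, and `now()` timestamp are placeholders, not the repository's exact schema):

```python
# Stand-alone sketch of the "claim before processing" pattern; the DSN and
# table name are placeholders, not SimpleLogin's actual schema.
import psycopg2


def try_claim(conn, sync_event_id: int) -> bool:
    """Atomically claim one event row; True means this worker won the race."""
    with conn.cursor() as cur:
        cur.execute(
            """
            UPDATE sync_event
            SET taken_time = now()
            WHERE id = %s
              AND taken_time IS NULL
            """,
            (sync_event_id,),
        )
        claimed = cur.rowcount > 0
    # Commit right away so other workers see the claim and skip this event.
    conn.commit()
    return claimed


if __name__ == "__main__":
    conn = psycopg2.connect("dbname=example")  # placeholder DSN
    if try_claim(conn, 42):
        print("claimed event 42, safe to process")
    else:
        print("event 42 was already taken by another worker")
```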
@@ -13,6 +13,8 @@ from typing import Callable, NoReturn
 _DEAD_LETTER_THRESHOLD_MINUTES = 10
 _DEAD_LETTER_INTERVAL_SECONDS = 30

+_POSTGRES_RECONNECT_INTERVAL_SECONDS = 5
+

 class EventSource(ABC):
     @abstractmethod
@@ -22,9 +24,19 @@ class EventSource(ABC):

 class PostgresEventSource(EventSource):
     def __init__(self, connection_string: str):
-        self.__connection = psycopg2.connect(connection_string)
+        self.__connection_string = connection_string
+        self.__connect()

     def run(self, on_event: Callable[[SyncEvent], NoReturn]):
+        while True:
+            try:
+                self.__listen(on_event)
+            except Exception as e:
+                LOG.warn(f"Error listening to events: {e}")
+                sleep(_POSTGRES_RECONNECT_INTERVAL_SECONDS)
+                self.__connect()
+
+    def __listen(self, on_event: Callable[[SyncEvent], NoReturn]):
         self.__connection.set_isolation_level(
             psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
         )
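Only the start of `__listen` appears in this hunk; the autocommit isolation level it sets is what psycopg2 needs for `LISTEN`/`NOTIFY` to deliver notifications outside an explicit transaction. For context, a generic consumer loop built on the same primitives looks roughly like this (the channel name, DSN, and callback are assumptions for illustration, not code from the repository):

```python
# Generic psycopg2 LISTEN/NOTIFY consumer loop; channel and DSN are placeholders.
import select

import psycopg2
import psycopg2.extensions


def listen_forever(dsn: str, channel: str, on_payload) -> None:
    conn = psycopg2.connect(dsn)
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

    cur = conn.cursor()
    cur.execute(f"LISTEN {channel};")

    while True:
        # Wait up to 5 seconds for the connection's socket to become readable.
        if select.select([conn], [], [], 5) == ([], [], []):
            continue  # timed out, go around again
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            on_payload(notify.payload)  # e.g. an event id sent by NOTIFY


if __name__ == "__main__":
    listen_forever("dbname=example", "sync_event_channel", print)
```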
@@ -44,12 +56,24 @@ class PostgresEventSource(EventSource):
                         webhook_id = int(notify.payload)
                         event = SyncEvent.get_by(id=webhook_id)
                         if event is not None:
-                            on_event(event)
+                            if event.mark_as_taken():
+                                on_event(event)
+                            else:
+                                LOG.info(
+                                    f"Event {event.id} was handled by another runner"
+                                )
                         else:
                             LOG.info(f"Could not find event with id={notify.payload}")
                     except Exception as e:
                         LOG.warn(f"Error getting event: {e}")

+    def __connect(self):
+        self.__connection = psycopg2.connect(self.__connection_string)
+
+        from app.db import Session
+
+        Session.close()
+

 class DeadLetterEventSource(EventSource):
     @newrelic.agent.background_task()
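The new `__connect` method re-establishes the raw psycopg2 connection and also closes the SQLAlchemy `Session`, so any transaction the ORM still had open on the dead connection is discarded and the next `SyncEvent.get_by(...)` starts fresh. A related, purely illustrative knob (not something this commit touches) is to have the engine itself detect stale pooled connections:

```python
# Illustrative only: let SQLAlchemy's pool validate connections before reuse,
# so a Postgres restart surfaces as a transparent reconnect, not a query error.
from sqlalchemy import create_engine, text

engine = create_engine(
    "postgresql+psycopg2://user:password@localhost/example",  # placeholder URL
    pool_pre_ping=True,  # ping the connection before handing it out
    pool_recycle=3600,   # and recycle connections older than an hour
)

with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())
```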
@@ -73,3 +97,4 @@ class DeadLetterEventSource(EventSource):
                     sleep(_DEAD_LETTER_INTERVAL_SECONDS)
             except Exception as e:
                 LOG.warn(f"Error getting dead letter event: {e}")
+                sleep(_DEAD_LETTER_INTERVAL_SECONDS)
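This hunk only adds a sleep after a failed iteration so the dead-letter loop backs off instead of spinning on a broken query. The selection query itself is outside the diff; per the commit message, the dead_letter runner may now pick up events it does not hold the lock on, which typically means sweeping for rows that were created or taken too long ago. A hypothetical sweep query, for illustration only (the SQL and column names are assumptions):

```python
# Hypothetical dead-letter sweep; table/column names mirror the diff above but
# the query itself is an assumption, not the repository's implementation.
from datetime import datetime, timedelta, timezone

import psycopg2  # noqa: F401 (connection is passed in by the caller)

_DEAD_LETTER_THRESHOLD_MINUTES = 10


def fetch_dead_letter_ids(conn) -> list[int]:
    threshold = datetime.now(timezone.utc) - timedelta(
        minutes=_DEAD_LETTER_THRESHOLD_MINUTES
    )
    with conn.cursor() as cur:
        cur.execute(
            """
            SELECT id
            FROM sync_event
            WHERE (taken_time IS NULL AND created_at < %s)
               OR (taken_time IS NOT NULL AND taken_time < %s)
            ORDER BY id
            LIMIT 100
            """,
            (threshold, threshold),
        )
        return [row[0] for row in cur.fetchall()]
```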
@@ -18,31 +18,25 @@ class Runner:
     @newrelic.agent.background_task()
     def __on_event(self, event: SyncEvent):
         try:
-            can_process = event.mark_as_taken()
-            if can_process:
-                event_created_at = event.created_at
-                start_time = arrow.now()
-                success = self.__sink.process(event)
-                if success:
-                    event_id = event.id
-                    SyncEvent.delete(event.id, commit=True)
-                    LOG.info(f"Marked {event_id} as done")
+            event_created_at = event.created_at
+            start_time = arrow.now()
+            success = self.__sink.process(event)
+            if success:
+                event_id = event.id
+                SyncEvent.delete(event.id, commit=True)
+                LOG.info(f"Marked {event_id} as done")

-                    end_time = arrow.now() - start_time
-                    time_between_taken_and_created = start_time - event_created_at
+                end_time = arrow.now() - start_time
+                time_between_taken_and_created = start_time - event_created_at

-                    newrelic.agent.record_custom_metric(
-                        "Custom/sync_event_processed", 1
-                    )
-                    newrelic.agent.record_custom_metric(
-                        "Custom/sync_event_process_time", end_time.total_seconds()
-                    )
-                    newrelic.agent.record_custom_metric(
-                        "Custom/sync_event_elapsed_time",
-                        time_between_taken_and_created.total_seconds(),
-                    )
-            else:
-                LOG.info(f"{event.id} was handled by another runner")
+                newrelic.agent.record_custom_metric("Custom/sync_event_processed", 1)
+                newrelic.agent.record_custom_metric(
+                    "Custom/sync_event_process_time", end_time.total_seconds()
+                )
+                newrelic.agent.record_custom_metric(
+                    "Custom/sync_event_elapsed_time",
+                    time_between_taken_and_created.total_seconds(),
+                )
         except Exception as e:
             LOG.warn(f"Exception processing event [id={event.id}]: {e}")
             newrelic.agent.record_custom_metric("Custom/sync_event_failed", 1)
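A note on the metrics in the new code path: subtracting two `arrow` timestamps yields a plain `datetime.timedelta`, so `end_time` is really the processing duration and `time_between_taken_and_created` is how long the event sat in the queue before being picked up. A standalone illustration (the sleeps just simulate waiting and work):

```python
# How the two durations reported to New Relic are derived; standalone sketch.
import time

import arrow

event_created_at = arrow.now()   # stand-in for event.created_at
time.sleep(0.10)                 # event waits in the queue
start_time = arrow.now()         # processing starts
time.sleep(0.05)                 # the sink does some work

process_time = arrow.now() - start_time         # a datetime.timedelta
queue_latency = start_time - event_created_at   # also a datetime.timedelta

print(process_time.total_seconds(), queue_latency.total_seconds())
```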
@@ -93,11 +93,23 @@ def log_nb_db_connection():
     newrelic.agent.record_custom_metric("Custom/nb_db_connections", nb_connection)


+@newrelic.agent.background_task()
+def log_pending_to_process_events():
+    r = Session.execute("select count(*) from sync_events WHERE taken_time IS NULL;")
+    events_pending = list(r)[0][0]
+
+    LOG.d("number of events pending to process %s", events_pending)
+    newrelic.agent.record_custom_metric(
+        "Custom/sync_events_pending_to_process", events_pending
+    )
+
+
 if __name__ == "__main__":
     exporter = MetricExporter(get_newrelic_license())
     while True:
         log_postfix_metrics()
         log_nb_db_connection()
+        log_pending_to_process_events()
         Session.close()

         exporter.run()
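The new gauge pulls the count out of the result with `list(r)[0][0]`; an equivalent, slightly tidier way to read a single value from a SQLAlchemy result is `.scalar()`, shown here only as a sketch using the same app session:

```python
# Same count, read with scalar(); illustrative alternative to list(r)[0][0].
from sqlalchemy import text

from app.db import Session

events_pending = Session.execute(
    text("SELECT count(*) FROM sync_events WHERE taken_time IS NULL")
).scalar()
print(events_pending)
```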