Make _state_groups_id_gen a normal IdGenerator

Erik Johnston 2016-08-30 16:54:40 +01:00
parent 3e784eff74
commit 5dc2a702cf
3 changed files with 39 additions and 47 deletions
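
The substance of the change is the different calling convention of the two generator classes: a StreamIdGenerator hands out ids through a context manager (so the store can track in-flight ids and expose a "current token"), while a plain IdGenerator.get_next() simply returns the next integer. The toy classes below sketch that difference; they are illustrative stand-ins, not Synapse's actual implementations.

    from contextlib import contextmanager
    import threading


    class ToyIdGenerator(object):
        """Toy stand-in for a plain IdGenerator: a counter seeded from the
        largest id already in the table. get_next() returns the next integer
        directly, so callers can assign ids without any `with` bookkeeping."""

        def __init__(self, current_max):
            self._lock = threading.Lock()
            self._next = current_max

        def get_next(self):
            with self._lock:
                self._next += 1
                return self._next


    class ToyStreamIdGenerator(object):
        """Toy stand-in for a StreamIdGenerator: ids are handed out inside a
        context manager so the generator knows when they have been persisted
        and can advance a "current token" accordingly."""

        def __init__(self, current_max):
            self._lock = threading.Lock()
            self._next = current_max
            self._current = current_max

        @contextmanager
        def get_next(self):
            with self._lock:
                self._next += 1
                new_id = self._next
            try:
                yield new_id
            finally:
                with self._lock:
                    self._current = max(self._current, new_id)

        def get_current_token(self):
            with self._lock:
                return self._current


    # Old shape: the id only exists inside the `with` block.
    stream_gen = ToyStreamIdGenerator(0)
    with stream_gen.get_next() as new_id:
        print("stream-style id:", new_id)

    # New shape: a plain synchronous call, as now used for state group ids.
    plain_gen = ToyIdGenerator(0)
    print("plain id:", plain_gen.get_next())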

View File

@@ -115,7 +115,7 @@ class DataStore(RoomMemberStore, RoomStore,
         )
         self._transaction_id_gen = IdGenerator(db_conn, "sent_transactions", "id")
-        self._state_groups_id_gen = StreamIdGenerator(db_conn, "state_groups", "id")
+        self._state_groups_id_gen = IdGenerator(db_conn, "state_groups", "id")
         self._access_tokens_id_gen = IdGenerator(db_conn, "access_tokens", "id")
         self._refresh_tokens_id_gen = IdGenerator(db_conn, "refresh_tokens", "id")
         self._event_reports_id_gen = IdGenerator(db_conn, "event_reports", "id")

View File

@@ -271,13 +271,9 @@ class EventsStore(SQLBaseStore):
             len(events_and_contexts)
         )
-        state_group_id_manager = self._state_groups_id_gen.get_next_mult(
-            len(events_and_contexts)
-        )
 
         with stream_ordering_manager as stream_orderings:
-            with state_group_id_manager as state_group_ids:
-                for (event, context), stream, state_group_id in zip(
-                    events_and_contexts, stream_orderings, state_group_ids
-                ):
+            for (event, context), stream, in zip(
+                events_and_contexts, stream_orderings
+            ):
                     event.internal_metadata.stream_ordering = stream
                     # Assign a state group_id in case a new id is needed for
@@ -286,7 +282,7 @@ class EventsStore(SQLBaseStore):
                     # but that make the code more complicated. Assigning an ID
                     # per event only causes the state_group_ids to grow as fast
                     # as the stream_ordering so in practise shouldn't be a problem.
-                    context.new_state_group_id = state_group_id
+                    context.new_state_group_id = self._state_groups_id_gen.get_next()
 
         chunks = [
             events_and_contexts[x:x + 100]
@@ -312,9 +308,8 @@ class EventsStore(SQLBaseStore):
                           delete_existing=False):
         try:
             with self._stream_id_gen.get_next() as stream_ordering:
-                with self._state_groups_id_gen.get_next() as state_group_id:
                     event.internal_metadata.stream_ordering = stream_ordering
-                    context.new_state_group_id = state_group_id
+                    context.new_state_group_id = self._state_groups_id_gen.get_next()
             yield self.runInteraction(
                 "persist_event",
                 self._persist_event_txn,
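
The comment retained in the -286,7 hunk argues that pre-allocating a state group id for every event is acceptable because the id space then grows no faster than the stream ordering does. A small self-contained illustration of that claim, using plain counters and a made-up event list:

    import itertools

    # One stream ordering per persisted event.
    stream_orderings = itertools.count(1)
    # A state group id is pre-allocated per event, whether or not it is used.
    state_group_ids = itertools.count(1)

    events = ["m.room.create", "m.room.member", "m.room.message", "m.room.message"]
    for event_type in events:
        stream = next(stream_orderings)
        new_state_group_id = next(state_group_ids)  # may never be used
        print(event_type, stream, new_state_group_id)

    # Both counters advance by len(events): state group ids are consumed at
    # most as fast as stream orderings.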

View File

@@ -526,6 +526,3 @@ class StateStore(SQLBaseStore):
         return self.runInteraction(
             "get_all_new_state_groups", get_all_new_state_groups_txn
         )
-
-    def get_state_stream_token(self):
-        return self._state_groups_id_gen.get_current_token()
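
Since a plain IdGenerator only allocates ids and, unlike the stream-style generator sketched above, has no notion of a current token, there is no state stream token left to expose, hence the removal of get_state_stream_token here.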