\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.8.0\n"
+
+#: indico_citadel/plugin.py:23
+msgid "Search backend URL"
+msgstr ""
+
+#: indico_citadel/plugin.py:24
+msgid "Search backend token"
+msgstr ""
+
+#: indico_citadel/plugin.py:25
+msgid "Authentication token for the Search backend"
+msgstr ""
+
+#: indico_citadel/plugin.py:26
+msgid "Search owner role"
+msgstr ""
+
+#: indico_citadel/plugin.py:27
+msgid "Tika server URL"
+msgstr ""
diff --git a/citadel/indico_citadel/util.py b/citadel/indico_citadel/util.py
new file mode 100644
index 0000000..d000fed
--- /dev/null
+++ b/citadel/indico_citadel/util.py
@@ -0,0 +1,138 @@
+# This file is part of the Indico plugins.
+# Copyright (C) 2002 - 2021 CERN
+#
+# The Indico plugins are free software; you can redistribute
+# them and/or modify them under the terms of the MIT License;
+# see the LICENSE file for more details.
+
+import re
+import sys
+import threading
+from functools import wraps
+
+from flask import current_app
+from flask.globals import _app_ctx_stack
+
+
+def parallelize(func, entries, batch_size=200):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ iterable_lock = threading.Lock()
+ result_lock = threading.Lock()
+ abort = threading.Event()
+ finished = threading.Event()
+ results = []
+ app = current_app._get_current_object()
+ main_app_context = _app_ctx_stack.top
+ worker_exc_info = None
+
+ def worker(iterator):
+ nonlocal worker_exc_info
+ while not abort.is_set() and not finished.is_set():
+ try:
+ with iterable_lock:
+ with main_app_context:
+ item = next(iterator)
+ except StopIteration:
+ finished.set()
+ break
+
+ with app.app_context():
+ try:
+ res = func(item, *args, **kwargs)
+ except BaseException:
+ worker_exc_info = sys.exc_info()
+ finished.set()
+ return
+ with result_lock:
+ results.append(res)
+
+ it = iter(entries)
+        threads = [threading.Thread(target=worker, name=f'worker/{i}', args=(it,))
+                   for i in range(batch_size)]
+
+ for t in threads:
+ t.start()
+
+ try:
+ finished.wait()
+ except KeyboardInterrupt:
+ print('\nFinishing pending jobs before aborting')
+ abort.set()
+
+ for t in threads:
+ t.join()
+
+ if worker_exc_info:
+ raise worker_exc_info[1].with_traceback(worker_exc_info[2])
+
+ return results, abort.is_set()
+
+ return wrapper
+
+
+def format_query(query, placeholders):
+ """Format and split the query into keywords and placeholders.
+
+ https://cern-search.docs.cern.ch/usage/operations/#advanced-queries
+
+ :param query: search query
+ :param placeholders: placeholder whitelist
+ :returns escaped query
+ """
+ patt = r'(?:^|\s)({}):([^:"\s]+|"[^"]+")(?:$|\s)'.format('|'.join(map(re.escape, placeholders)))
+ idx = 0
+ keys = []
+ for match in re.finditer(patt, query):
+ placeholder = f'{placeholders[match.group(1)]}:{escape(match.group(2))}'
+ if idx != match.start():
+ keys.append(escape(query[idx:match.start()]))
+ keys.append(placeholder)
+ idx = match.end()
+
+ if idx != len(query):
+ keys.append(escape(query[idx:len(query)]))
+
+ return ' '.join(keys).strip()
+
+
+def format_filters(params, filters, range_filters):
+ """Extract any special placeholder filter, such as ranges, from the query params.
+
+ https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_ranges
+
+ :param params: The filter query params
+ :param filters: The filter whitelist
+ :param range_filters: The range filter whitelist
+ :returns: filters, extracted placeholders
+ """
+ _filters = {}
+ query = []
+ for k, v in params.items():
+ if k not in filters:
+ continue
+ if k in range_filters:
+ match = re.match(r'[[{].+ TO .+[]}]', v)
+ if match:
+ query.append(f'+{range_filters[k]}:{v}')
+ continue
+ _filters[k] = v
+ return _filters, ' '.join(query)
+
+
+def escape(query):
+ """Prepend all special ElasticSearch characters with a backslash."""
+    patt = r'([+\-=><!(){}\[\]^"~*?:\\/]|&&|\|\|)'
+    return re.sub(patt, r'\\\1', query)
diff --git a/citadel/setup.cfg b/citadel/setup.cfg
new file mode 100644
--- /dev/null
+++ b/citadel/setup.cfg
@@ -0,0 +1,20 @@
+[options]
+install_requires =
+    indico>=3.0.dev0
+ indico-plugin-livesync>=3.0.dev0
+
+[options.entry_points]
+indico.plugins =
+ citadel = indico_citadel.plugin:CitadelPlugin
+
+
+
+[pydocstyle]
+ignore = D100,D101,D102,D103,D104,D105,D107,D203,D213
diff --git a/citadel/setup.py b/citadel/setup.py
new file mode 100644
index 0000000..03c7604
--- /dev/null
+++ b/citadel/setup.py
@@ -0,0 +1,11 @@
+# This file is part of the Indico plugins.
+# Copyright (C) 2002 - 2021 CERN
+#
+# The Indico plugins are free software; you can redistribute
+# them and/or modify them under the terms of the MIT License;
+# see the LICENSE file for more details.
+
+from setuptools import setup
+
+
+setup()
diff --git a/livesync/indico_livesync/base.py b/livesync/indico_livesync/base.py
index 89b8639..95cce69 100644
--- a/livesync/indico_livesync/base.py
+++ b/livesync/indico_livesync/base.py
@@ -9,8 +9,13 @@ from flask_pluginengine import depends, trim_docstring
from sqlalchemy.orm import subqueryload
from indico.core.plugins import IndicoPlugin, PluginCategory
+from indico.modules.attachments.models.attachments import Attachment
from indico.modules.categories import Category
from indico.modules.categories.models.principals import CategoryPrincipal
+from indico.modules.events.contributions.models.contributions import Contribution
+from indico.modules.events.contributions.models.subcontributions import SubContribution
+from indico.modules.events.models.events import Event
+from indico.modules.events.notes.models.notes import EventNote
from indico.util.date_time import now_utc
from indico.util.decorators import classproperty
@@ -48,6 +53,9 @@ class LiveSyncBackendBase:
form = AgentForm
#: whether only one agent with this backend is allowed
unique = False
+    #: whether a reset also deletes the indexed data on whatever backend is used,
+    #: or whether the user needs to delete it themselves after doing a reset
+ reset_deletes_indexed_data = False
@classproperty
@classmethod
@@ -70,6 +78,25 @@ class LiveSyncBackendBase:
"""
self.agent = agent
+ def is_configured(self):
+ """Check whether the backend is properly configured.
+
+ If this returns False, running the initial export or queue
+ will not be possible.
+ """
+ return True
+
+ def check_queue_status(self):
+ """Return whether queue runs are allowed (or why not).
+
+ :return: ``allowed, reason`` tuple; the reason is None if runs are allowed.
+ """
+ if not self.is_configured():
+ return False, 'not configured'
+ if self.agent.initial_data_exported:
+ return True, None
+ return False, 'initial export not performed'
+
def fetch_records(self, count=None):
query = (self.agent.queue
.filter_by(processed=False)
@@ -84,26 +111,49 @@ class LiveSyncBackendBase:
"""
self.agent.last_run = now_utc()
- def run(self):
+ def process_queue(self, uploader):
+ """Process queued entries during an export run."""
+ records = self.fetch_records()
+        LiveSyncPlugin.logger.info('Uploading %d records via %s', len(records), self.uploader.__name__)
+ uploader.run(records)
+
+ def run(self, verbose=False, from_cli=False):
"""Runs the livesync export"""
if self.uploader is None: # pragma: no cover
raise NotImplementedError
- records = self.fetch_records()
- uploader = self.uploader(self)
- LiveSyncPlugin.logger.info('Uploading %d records', len(records))
- uploader.run(records)
+ uploader = self.uploader(self, verbose=verbose, from_cli=from_cli)
+ self.process_queue(uploader)
self.update_last_run()
- def run_initial_export(self):
+ def get_initial_query(self, model_cls, force):
+ """Get the initial export query for a given model.
+
+ Supported models are `Event`, `Contribution`, `SubContribution`,
+ `Attachment` and `EventNote`.
+
+ :param model_cls: The model class to query
+ :param force: Whether the initial export was started with ``--force``
+ """
+ fn = {
+ Event: query_events,
+ Contribution: query_contributions,
+ SubContribution: query_subcontributions,
+ Attachment: query_attachments,
+ EventNote: query_notes,
+ }[model_cls]
+ return fn()
+
+ def run_initial_export(self, batch_size, force=False, verbose=False):
"""Runs the initial export.
This process is expected to take a very long time.
+ :return: True if everything was successful, False if not
"""
if self.uploader is None: # pragma: no cover
raise NotImplementedError
- uploader = self.uploader(self)
+ uploader = self.uploader(self, verbose=verbose, from_cli=True)
Category.allow_relationship_preloading = True
Category.preload_relationships(Category.query, 'acl_entries',
@@ -111,13 +161,54 @@ class LiveSyncBackendBase:
CategoryPrincipal))
_category_cache = Category.query.all() # noqa: F841
- events = query_events()
- uploader.run_initial(events.yield_per(5000), events.count())
- contributions = query_contributions()
- uploader.run_initial(contributions.yield_per(5000), contributions.count())
- subcontributions = query_subcontributions()
- uploader.run_initial(subcontributions.yield_per(5000), subcontributions.count())
- attachments = query_attachments()
- uploader.run_initial(attachments.yield_per(5000), attachments.count())
- notes = query_notes()
- uploader.run_initial(notes.yield_per(5000), notes.count())
+ events = self.get_initial_query(Event, force)
+ contributions = self.get_initial_query(Contribution, force)
+ subcontributions = self.get_initial_query(SubContribution, force)
+ attachments = self.get_initial_query(Attachment, force)
+ notes = self.get_initial_query(EventNote, force)
+
+ print('Exporting events')
+ if not uploader.run_initial(events.yield_per(batch_size), events.count()):
+ print('Initial export of events failed')
+ return False
+ print('Exporting contributions')
+ if not uploader.run_initial(contributions.yield_per(batch_size), contributions.count()):
+ print('Initial export of contributions failed')
+ return False
+ print('Exporting subcontributions')
+ if not uploader.run_initial(subcontributions.yield_per(batch_size), subcontributions.count()):
+ print('Initial export of subcontributions failed')
+ return False
+ print('Exporting attachments')
+ if not uploader.run_initial(attachments.yield_per(batch_size), attachments.count()):
+ print('Initial export of attachments failed')
+ return False
+ print('Exporting notes')
+ if not uploader.run_initial(notes.yield_per(batch_size), notes.count()):
+ print('Initial export of notes failed')
+ return False
+ return True
+
+ def check_reset_status(self):
+ """Return whether a reset is allowed (or why not).
+
+ When resetting is not allowed, the message indicates why this is the case.
+
+ :return: ``allowed, reason`` tuple; the reason is None if resetting is allowed.
+ """
+ if not self.agent.queue.has_rows() and not self.agent.initial_data_exported:
+ return False, 'There is nothing to reset'
+ return True, None
+
+ def reset(self):
+ """Perform a full reset of all data related to the backend.
+
+ This deletes all queued changes, resets the initial export state back
+    to pending, and performs any other backend-specific tasks that may be required.
+
+ It is not necessary to delete the actual search indexes (which are possibly
+ on a remote service), but if your backend has the ability to do it you may
+ want to do it and display a message to the user indicating this.
+ """
+ self.agent.initial_data_exported = False
+ self.agent.queue.delete()
diff --git a/livesync/indico_livesync/cli.py b/livesync/indico_livesync/cli.py
index f2dff1c..2c9ddce 100644
--- a/livesync/indico_livesync/cli.py
+++ b/livesync/indico_livesync/cli.py
@@ -5,11 +5,14 @@
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
+import time
+
import click
from flask_pluginengine import current_plugin
from terminaltables import AsciiTable
from indico.cli.core import cli_group
+from indico.core.config import config
from indico.core.db import db
from indico.util.console import cformat
@@ -34,18 +37,23 @@ def agents():
"""Lists the currently active agents"""
print('The following LiveSync agents are active:')
agent_list = LiveSyncAgent.query.order_by(LiveSyncAgent.backend_name, db.func.lower(LiveSyncAgent.name)).all()
- table_data = [['ID', 'Name', 'Backend', 'Initial Export', 'Queue']]
+ table_data = [['ID', 'Name', 'Backend', 'Queue', 'Status']]
for agent in agent_list:
- initial = (cformat('%{green!}done%{reset}') if agent.initial_data_exported else
- cformat('%{yellow!}pending%{reset}'))
if agent.backend is None:
backend_title = cformat('%{red!}invalid backend ({})%{reset}').format(agent.backend_name)
+ queue_status = 'n/a'
else:
backend_title = agent.backend.title
- table_data.append([str(agent.id), agent.name, backend_title, initial,
- str(agent.queue.filter_by(processed=False).count())])
+ backend = agent.create_backend()
+ queue_allowed, reason = backend.check_queue_status()
+ if queue_allowed:
+ queue_status = cformat('%{green!}ready%{reset}')
+ else:
+ queue_status = cformat('%{yellow!}{}%{reset}').format(reason)
+ table_data.append([str(agent.id), agent.name, backend_title,
+ str(agent.queue.filter_by(processed=False).count()), queue_status])
table = AsciiTable(table_data)
- table.justify_columns[4] = 'right'
+ table.justify_columns[3] = 'right'
print(table.table)
if not all(a.initial_data_exported for a in agent_list):
print()
@@ -56,33 +64,52 @@ def agents():
@cli.command()
@click.argument('agent_id', type=int)
-@click.option('--force', is_flag=True, help="Perform export even if it has already been done once.")
-def initial_export(agent_id, force):
+@click.option('--force', '-f', is_flag=True, help="Perform export even if it has already been done once.")
+@click.option('--verbose', '-v', is_flag=True, help="Be more verbose (what this does is up to the backend)")
+@click.option('--batch', type=int, default=5000, help="The amount of records yielded per export batch.",
+ show_default=True, metavar='N')
+def initial_export(agent_id, batch, force, verbose):
"""Performs the initial data export for an agent"""
agent = LiveSyncAgent.get(agent_id)
if agent is None:
print('No such agent')
return
+
if agent.backend is None:
print(cformat('Cannot run agent %{red!}{}%{reset} (backend not found)').format(agent.name))
return
+
print(cformat('Selected agent: %{white!}{}%{reset} ({})').format(agent.name, agent.backend.title))
+
+ backend = agent.create_backend()
+ if not backend.is_configured():
+ print(cformat('Agent %{red!}{}%{reset} is not properly configured').format(agent.name))
+ return
+
if agent.initial_data_exported and not force:
print('The initial export has already been performed for this agent.')
print(cformat('To re-run it, use %{yellow!}--force%{reset}'))
return
- backend = agent.create_backend()
- backend.run_initial_export()
+ if not backend.run_initial_export(batch, force, verbose):
+ print('The initial export failed; not marking it as done')
+ return
+
agent.initial_data_exported = True
db.session.commit()
@cli.command()
@click.argument('agent_id', type=int, required=False)
-@click.option('--force', is_flag=True, help="Run even if initial export was not done")
-def run(agent_id, force=False):
+@click.option('--force', '-f', is_flag=True, help="Run even if initial export was not done")
+@click.option('--verbose', '-v', is_flag=True, help="Be more verbose (what this does is up to the backend)")
+def run(agent_id, force, verbose):
"""Runs the livesync agent"""
+ from indico_livesync.plugin import LiveSyncPlugin
+ if LiveSyncPlugin.settings.get('disable_queue_runs'):
+ print(cformat('%{yellow!}Queue runs are disabled%{reset}'))
+ return
+
if agent_id is None:
agent_list = LiveSyncAgent.query.all()
else:
@@ -96,13 +123,60 @@ def run(agent_id, force=False):
if agent.backend is None:
print(cformat('Skipping agent: %{red!}{}%{reset} (backend not found)').format(agent.name))
continue
- if not agent.initial_data_exported and not force:
- print(cformat('Skipping agent: %{red!}{}%{reset} (initial export not performed)').format(agent.name))
+ backend = agent.create_backend()
+ queue_allowed, reason = backend.check_queue_status()
+ if not queue_allowed and not force:
+ print(cformat('Skipping agent: %{red!}{}%{reset} ({})').format(agent.name, reason))
continue
print(cformat('Running agent: %{white!}{}%{reset}').format(agent.name))
try:
- agent.create_backend().run()
+ backend.run(verbose, from_cli=True)
db.session.commit()
except Exception:
db.session.rollback()
raise
+
+
+@cli.command()
+@click.argument('agent_id', type=int)
+def reset(agent_id):
+ """Performs the initial data export for an agent"""
+ agent = LiveSyncAgent.get(agent_id)
+ if agent is None:
+ print('No such agent')
+ return
+
+ if agent.backend is None:
+ print(cformat('Cannot run agent %{red!}{}%{reset} (backend not found)').format(agent.name))
+ return
+
+ backend = agent.create_backend()
+ reset_allowed, message = backend.check_reset_status()
+
+ if not reset_allowed:
+ print(f'Resetting is not possible: {message}')
+ return
+
+ print(cformat('Selected agent: %{white!}{}%{reset} ({})').format(agent.name, backend.title))
+ print(cformat('%{yellow!}!!! %{red!}DANGER %{yellow!}!!!%{reset}'))
+    if backend.reset_deletes_indexed_data:
+        # no placeholders in the message, so no .format() is needed
+        print(cformat('%{yellow!}This command will delete all indexed data on this backend.%{reset}'))
+    else:
+        # no placeholders in the message, so no .format() is needed
+        print(cformat('%{yellow!}This command should only be used if the data on this backend '
+                      'has been deleted.%{reset}'))
+ print(cformat('%{yellow!}After resetting you need to perform a new initial export.%{reset}'))
+ click.confirm(click.style('Do you really want to perform the reset?', fg='red', bold=True),
+ default=False, abort=True)
+ if not config.DEBUG:
+ click.confirm(click.style('Are you absolutely sure?', fg='red', bold=True), default=False, abort=True)
+ for i in range(5):
+ print(cformat('\rResetting in %{white!}{}%{reset}s (CTRL+C to abort)').format(5 - i), end='')
+ time.sleep(1)
+ print('')
+
+ backend.reset()
+ db.session.commit()
+ print(cformat('Reset complete; run %{green!}indico livesync initial-export {}%{reset} for a new export')
+ .format(agent.id))
diff --git a/livesync/indico_livesync/controllers.py b/livesync/indico_livesync/controllers.py
index ecd76de..8428aa1 100644
--- a/livesync/indico_livesync/controllers.py
+++ b/livesync/indico_livesync/controllers.py
@@ -7,6 +7,7 @@
from flask import flash, redirect, request
from flask_pluginengine import current_plugin, render_plugin_template
+from sqlalchemy.orm.attributes import flag_modified
from werkzeug.exceptions import NotFound
from indico.core.db import db
@@ -81,7 +82,9 @@ class RHEditAgent(RHAdminBase):
if form.validate_on_submit():
data = form.data
self.agent.name = data.pop('name')
- self.agent.settings = data
+ if data:
+ self.agent.settings.update(data)
+ flag_modified(self.agent, 'settings')
flash(_('Agent updated'), 'success')
return jsonify_data(flash=False)
diff --git a/livesync/indico_livesync/export_schemas.py b/livesync/indico_livesync/export_schemas.py
new file mode 100644
index 0000000..e04c5cb
--- /dev/null
+++ b/livesync/indico_livesync/export_schemas.py
@@ -0,0 +1,116 @@
+# This file is part of the Indico plugins.
+# Copyright (C) 2002 - 2021 CERN
+#
+# The Indico plugins are free software; you can redistribute
+# them and/or modify them under the terms of the MIT License;
+# see the LICENSE file for more details.
+
+from marshmallow import fields
+
+from indico.core.db.sqlalchemy.links import LinkType
+from indico.core.marshmallow import mm
+from indico.modules.attachments import Attachment
+from indico.modules.categories import Category
+from indico.modules.events.contributions import Contribution
+from indico.modules.events.contributions.models.subcontributions import SubContribution
+from indico.modules.events.notes.models.notes import EventNote
+from indico.modules.search.base import SearchTarget
+from indico.modules.search.schemas import LocationSchema, PersonSchema
+from indico.util.marshmallow import NoneRemovingList
+from indico.web.flask.util import url_for
+
+
+class CategorySchema(mm.SQLAlchemyAutoSchema):
+ class Meta:
+ model = Category
+ fields = ('id', 'title', 'url')
+
+ url = fields.Function(lambda c: url_for('categories.display', category_id=c['id']))
+
+
+class AttachmentSchema(mm.SQLAlchemyAutoSchema):
+ class Meta:
+ model = Attachment
+ fields = ('attachment_id', 'folder_id', 'type', 'attachment_type', 'title', 'filename', 'event_id',
+ 'contribution_id', 'subcontribution_id', 'user', 'url', 'category_id', 'category_path',
+ 'modified_dt')
+
+ attachment_id = fields.Int(attribute='id')
+ folder_id = fields.Int(attribute='folder_id')
+ type = fields.Constant(SearchTarget.attachment.name)
+ attachment_type = fields.String(attribute='type.name')
+ filename = fields.String(attribute='file.filename')
+ event_id = fields.Int(attribute='folder.event.id')
+ contribution_id = fields.Method('_contribution_id')
+ subcontribution_id = fields.Int(attribute='folder.subcontribution_id')
+ user = fields.Nested(PersonSchema)
+ category_id = fields.Int(attribute='folder.event.category_id')
+ category_path = fields.List(fields.Nested(CategorySchema), attribute='folder.event.detailed_category_chain')
+ url = fields.String(attribute='download_url')
+
+ def _contribution_id(self, attachment):
+ if attachment.folder.link_type == LinkType.contribution:
+ return attachment.folder.contribution_id
+ elif attachment.folder.link_type == LinkType.subcontribution:
+ return attachment.folder.subcontribution.contribution_id
+ return None
+
+
+class ContributionSchema(mm.SQLAlchemyAutoSchema):
+ class Meta:
+ model = Contribution
+ fields = ('contribution_id', 'type', 'contribution_type', 'event_id', 'title', 'description', 'location',
+ 'persons', 'url', 'category_id', 'category_path', 'start_dt', 'end_dt', 'duration')
+
+ contribution_id = fields.Int(attribute='id')
+ type = fields.Constant(SearchTarget.contribution.name)
+ contribution_type = fields.String(attribute='type.name')
+ location = fields.Function(lambda contrib: LocationSchema().dump(contrib))
+ persons = NoneRemovingList(fields.Nested(PersonSchema), attribute='person_links')
+ category_id = fields.Int(attribute='event.category_id')
+ category_path = fields.List(fields.Nested(CategorySchema), attribute='event.detailed_category_chain')
+ url = fields.Function(lambda contrib: url_for('contributions.display_contribution', contrib, _external=False))
+ duration = fields.TimeDelta(precision=fields.TimeDelta.MINUTES)
+
+
+class SubContributionSchema(mm.SQLAlchemyAutoSchema):
+ class Meta:
+ model = SubContribution
+ fields = ('subcontribution_id', 'type', 'title', 'description', 'event_id', 'contribution_id', 'persons',
+ 'location', 'url', 'category_id', 'category_path', 'start_dt', 'end_dt', 'duration')
+
+ subcontribution_id = fields.Int(attribute='id')
+ type = fields.Constant(SearchTarget.subcontribution.name)
+ event_id = fields.Int(attribute='contribution.event_id')
+ persons = NoneRemovingList(fields.Nested(PersonSchema), attribute='person_links')
+ location = fields.Function(lambda subc: LocationSchema().dump(subc.contribution))
+ category_id = fields.Int(attribute='event.category_id')
+ category_path = fields.List(fields.Nested(CategorySchema), attribute='event.detailed_category_chain')
+ url = fields.Function(lambda subc: url_for('contributions.display_subcontribution', subc, _external=False))
+ start_dt = fields.DateTime(attribute='contribution.start_dt')
+ end_dt = fields.DateTime(attribute='contribution.end_dt')
+ duration = fields.TimeDelta(precision=fields.TimeDelta.MINUTES)
+
+
+class EventNoteSchema(mm.SQLAlchemyAutoSchema):
+ class Meta:
+ model = EventNote
+ fields = ('note_id', 'type', 'content', 'event_id', 'contribution_id', 'subcontribution_id', 'url',
+ 'category_id', 'category_path', 'modified_dt', 'user')
+
+ note_id = fields.Int(attribute='id')
+ type = fields.Constant(SearchTarget.event_note.name)
+ content = fields.Str(attribute='current_revision.source')
+ contribution_id = fields.Method('_contribution_id')
+ subcontribution_id = fields.Int()
+ category_id = fields.Int(attribute='event.category_id')
+ category_path = fields.List(fields.Nested(CategorySchema), attribute='event.detailed_category_chain')
+ url = fields.Function(lambda note: url_for('event_notes.view', note, _external=False))
+ modified_dt = fields.DateTime(attribute='current_revision.created_dt')
+ user = fields.Nested(PersonSchema, attribute='current_revision.user')
+
+ def _contribution_id(self, note):
+ if note.link_type == LinkType.contribution:
+ return note.contribution_id
+ elif note.link_type == LinkType.subcontribution:
+ return note.subcontribution.contribution_id
diff --git a/livesync/indico_livesync/export_schemas_test.py b/livesync/indico_livesync/export_schemas_test.py
new file mode 100644
index 0000000..9d0a135
--- /dev/null
+++ b/livesync/indico_livesync/export_schemas_test.py
@@ -0,0 +1,183 @@
+# This file is part of the Indico plugins.
+# Copyright (C) 2002 - 2021 CERN
+#
+# The Indico plugins are free software; you can redistribute
+# them and/or modify them under the terms of the MIT License;
+# see the LICENSE file for more details.
+
+from datetime import datetime, timedelta
+from io import BytesIO
+
+import pytest
+from pytz import utc
+
+from indico.modules.attachments.models.attachments import Attachment, AttachmentFile, AttachmentType
+from indico.modules.attachments.models.folders import AttachmentFolder
+from indico.modules.events.contributions.models.persons import ContributionPersonLink, SubContributionPersonLink
+from indico.modules.events.contributions.models.subcontributions import SubContribution
+from indico.modules.events.models.persons import EventPerson
+from indico.modules.events.notes.models.notes import EventNote, RenderMode
+
+
+pytest_plugins = 'indico.modules.events.timetable.testing.fixtures'
+
+
+@pytest.mark.parametrize('scheduled', (False, True))
+def test_dump_contribution(db, dummy_user, dummy_event, dummy_contribution, create_entry, scheduled):
+ from .export_schemas import ContributionSchema
+
+ person = EventPerson.create_from_user(dummy_user, dummy_event)
+ dummy_contribution.person_links.append(ContributionPersonLink(person=person))
+ dummy_contribution.description = 'A dummy contribution'
+
+ extra = {'start_dt': None, 'end_dt': None}
+ if scheduled:
+ create_entry(dummy_contribution, utc.localize(datetime(2020, 4, 20, 4, 20)))
+ extra = {
+ 'start_dt': dummy_contribution.start_dt.isoformat(),
+ 'end_dt': dummy_contribution.end_dt.isoformat(),
+ }
+
+ db.session.flush()
+ category_id = dummy_contribution.event.category_id
+ schema = ContributionSchema()
+ assert schema.dump(dummy_contribution) == {
+ 'description': 'A dummy contribution',
+ 'location': {'address': '', 'room_name': '', 'venue_name': ''},
+ 'persons': [{'affiliation': None, 'name': 'Guinea Pig'}],
+ 'title': 'Dummy Contribution',
+ 'category_id': category_id,
+ 'category_path': [
+ {'id': 0, 'title': 'Home', 'url': '/'},
+ {'id': category_id, 'title': 'dummy', 'url': f'/category/{category_id}/'},
+ ],
+ 'contribution_id': dummy_contribution.id,
+ 'duration': 20,
+ 'event_id': 0,
+ 'type': 'contribution',
+ 'url': f'/event/0/contributions/{dummy_contribution.id}/',
+ **extra
+ }
+
+
+@pytest.mark.parametrize('scheduled', (False, True))
+def test_dump_subcontribution(db, dummy_user, dummy_event, dummy_contribution, create_entry, scheduled):
+ from .export_schemas import SubContributionSchema
+
+ extra = {'start_dt': None, 'end_dt': None}
+ if scheduled:
+ create_entry(dummy_contribution, utc.localize(datetime(2020, 4, 20, 4, 20)))
+ extra = {
+ 'start_dt': dummy_contribution.start_dt.isoformat(),
+ 'end_dt': dummy_contribution.end_dt.isoformat(),
+ }
+
+ subcontribution = SubContribution(contribution=dummy_contribution, title='Dummy Subcontribution',
+ description='A dummy subcontribution',
+ duration=timedelta(minutes=10))
+
+ person = EventPerson.create_from_user(dummy_user, dummy_event)
+ subcontribution.person_links.append(SubContributionPersonLink(person=person))
+
+ db.session.flush()
+ category_id = dummy_contribution.event.category_id
+ schema = SubContributionSchema()
+ assert schema.dump(subcontribution) == {
+ 'description': 'A dummy subcontribution',
+ 'location': {'address': '', 'room_name': '', 'venue_name': ''},
+ 'persons': [{'affiliation': None, 'name': 'Guinea Pig'}],
+ 'title': 'Dummy Subcontribution',
+ 'category_id': category_id,
+ 'category_path': [
+ {'id': 0, 'title': 'Home', 'url': '/'},
+ {'id': category_id, 'title': 'dummy', 'url': f'/category/{category_id}/'},
+ ],
+ 'contribution_id': dummy_contribution.id,
+ 'duration': 10,
+ 'event_id': 0,
+ 'subcontribution_id': subcontribution.id,
+ 'type': 'subcontribution',
+ 'url': f'/event/0/contributions/{dummy_contribution.id}/subcontributions/{subcontribution.id}',
+ **extra
+ }
+
+
+def test_dump_attachment(db, dummy_user, dummy_contribution):
+ from .export_schemas import AttachmentSchema
+
+ folder = AttachmentFolder(title='Dummy Folder', description='a dummy folder')
+ file = AttachmentFile(user=dummy_user, filename='dummy_file.txt', content_type='text/plain')
+ attachment = Attachment(folder=folder, user=dummy_user, title='Dummy Attachment', type=AttachmentType.file,
+ file=file)
+ attachment.folder.object = dummy_contribution
+ attachment.file.save(BytesIO(b'hello world'))
+ db.session.flush()
+
+ category_id = dummy_contribution.event.category_id
+ schema = AttachmentSchema()
+ assert schema.dump(attachment) == {
+ 'filename': 'dummy_file.txt',
+ 'title': 'Dummy Attachment',
+ 'user': {'affiliation': None, 'name': 'Guinea Pig'},
+ 'attachment_id': attachment.id,
+ 'attachment_type': 'file',
+ 'category_id': category_id,
+ 'category_path': [
+ {'id': 0, 'title': 'Home', 'url': '/'},
+ {'id': category_id, 'title': 'dummy', 'url': f'/category/{category_id}/'},
+ ],
+ 'contribution_id': dummy_contribution.id,
+ 'subcontribution_id': None,
+ 'event_id': 0,
+ 'folder_id': folder.id,
+ 'modified_dt': attachment.modified_dt.isoformat(),
+ 'type': 'attachment',
+ 'url': (
+ f'/event/0/contributions/'
+ f'{dummy_contribution.id}/attachments/{folder.id}/{attachment.id}/dummy_file.txt'
+ ),
+ }
+
+
+@pytest.mark.parametrize('link_type', ('event', 'contrib', 'subcontrib'))
+def test_dump_event_note(db, dummy_user, dummy_event, dummy_contribution, link_type):
+ from .export_schemas import EventNoteSchema
+
+ if link_type == 'event':
+ ids = {'contribution_id': None, 'subcontribution_id': None}
+ note = EventNote(object=dummy_event)
+ url = '/event/0/note/'
+ elif link_type == 'contrib':
+ ids = {'contribution_id': dummy_contribution.id, 'subcontribution_id': None}
+ note = EventNote(object=dummy_contribution)
+ url = f'/event/0/contributions/{dummy_contribution.id}/note/'
+ elif link_type == 'subcontrib':
+ subcontribution = SubContribution(contribution=dummy_contribution, title='Dummy Subcontribution',
+ duration=timedelta(minutes=10))
+ db.session.flush()
+ ids = {
+ 'contribution_id': subcontribution.contribution_id,
+ 'subcontribution_id': subcontribution.id,
+ }
+ note = EventNote(object=subcontribution)
+ url = f'/event/0/contributions/{dummy_contribution.id}/subcontributions/{subcontribution.id}/note/'
+
+ note.create_revision(RenderMode.html, 'this is a dummy note', dummy_user)
+ db.session.flush()
+ category_id = dummy_event.category_id
+ schema = EventNoteSchema()
+ assert schema.dump(note) == {
+ 'content': 'this is a dummy note',
+ 'user': {'affiliation': None, 'name': 'Guinea Pig'},
+ 'category_id': category_id,
+ 'category_path': [
+ {'id': 0, 'title': 'Home', 'url': '/'},
+ {'id': category_id, 'title': 'dummy', 'url': f'/category/{category_id}/'},
+ ],
+ 'modified_dt': note.current_revision.created_dt.isoformat(),
+ 'event_id': 0,
+ 'note_id': note.id,
+ 'type': 'event_note',
+ 'url': url,
+ **ids
+ }
diff --git a/livesync/indico_livesync/handler.py b/livesync/indico_livesync/handler.py
index d27e7a3..5a4cb93 100644
--- a/livesync/indico_livesync/handler.py
+++ b/livesync/indico_livesync/handler.py
@@ -11,12 +11,17 @@ from flask import g
from sqlalchemy import inspect
from indico.core import signals
+from indico.core.db.sqlalchemy.links import LinkType
from indico.core.db.sqlalchemy.protection import ProtectionMode
+from indico.modules.attachments.models.attachments import Attachment
+from indico.modules.attachments.models.folders import AttachmentFolder
from indico.modules.categories.models.categories import Category
from indico.modules.events import Event
from indico.modules.events.contributions.models.contributions import Contribution
from indico.modules.events.contributions.models.subcontributions import SubContribution
+from indico.modules.events.notes.models.notes import EventNote
from indico.modules.events.sessions import Session
+from indico.modules.events.sessions.models.blocks import SessionBlock
from indico_livesync.models.queue import ChangeType, LiveSyncQueueEntry
from indico_livesync.util import get_excluded_categories, obj_ref
@@ -42,6 +47,12 @@ def connect_signals(plugin):
plugin.connect(signals.event.subcontribution_updated, _updated)
# event times
plugin.connect(signals.event.times_changed, _event_times_changed, sender=Event)
+ plugin.connect(signals.event.times_changed, _event_times_changed, sender=Contribution)
+ # location
+ plugin.connect(signals.event.location_changed, _location_changed, sender=Event)
+ plugin.connect(signals.event.location_changed, _location_changed, sender=Contribution)
+ plugin.connect(signals.event.location_changed, _location_changed, sender=Session)
+ plugin.connect(signals.event.location_changed, _session_block_location_changed, sender=SessionBlock)
# timetable
plugin.connect(signals.event.timetable_entry_created, _timetable_changed)
plugin.connect(signals.event.timetable_entry_updated, _timetable_changed)
@@ -57,14 +68,18 @@ def connect_signals(plugin):
plugin.connect(signals.acl.entry_changed, _acl_entry_changed, sender=Session)
plugin.connect(signals.acl.entry_changed, _acl_entry_changed, sender=Contribution)
# notes
- plugin.connect(signals.event.notes.note_added, _note_changed)
- plugin.connect(signals.event.notes.note_deleted, _note_changed)
- plugin.connect(signals.event.notes.note_modified, _note_changed)
+ plugin.connect(signals.event.notes.note_added, _created)
+ plugin.connect(signals.event.notes.note_deleted, _deleted)
+ plugin.connect(signals.event.notes.note_modified, _updated)
# attachments
- plugin.connect(signals.attachments.folder_deleted, _attachment_changed)
- plugin.connect(signals.attachments.attachment_created, _attachment_changed)
- plugin.connect(signals.attachments.attachment_deleted, _attachment_changed)
- plugin.connect(signals.attachments.attachment_updated, _attachment_changed)
+ plugin.connect(signals.attachments.folder_deleted, _attachment_folder_deleted)
+ plugin.connect(signals.attachments.attachment_created, _created)
+ plugin.connect(signals.attachments.attachment_deleted, _deleted)
+ plugin.connect(signals.attachments.attachment_updated, _updated)
+ plugin.connect(signals.acl.protection_changed, _attachment_folder_protection_changed, sender=AttachmentFolder)
+ plugin.connect(signals.acl.protection_changed, _protection_changed, sender=Attachment)
+ plugin.connect(signals.acl.entry_changed, _attachment_folder_acl_entry_changed, sender=AttachmentFolder)
+ plugin.connect(signals.acl.entry_changed, _acl_entry_changed, sender=Attachment)
def _moved(obj, old_parent, **kwargs):
@@ -80,7 +95,7 @@ def _moved(obj, old_parent, **kwargs):
def _created(obj, **kwargs):
- if isinstance(obj, Event):
+ if isinstance(obj, (Event, EventNote, Attachment)):
parent = None
elif isinstance(obj, Contribution):
parent = obj.event
@@ -105,12 +120,21 @@ def _event_times_changed(sender, obj, **kwargs):
_register_change(obj, ChangeType.data_changed)
+def _session_block_location_changed(sender, obj, **kwargs):
+ for contrib in obj.contributions:
+ _register_change(contrib, ChangeType.location_changed)
+
+
+def _location_changed(sender, obj, **kwargs):
+ _register_change(obj, ChangeType.location_changed)
+
+
def _timetable_changed(entry, **kwargs):
_register_change(entry.event, ChangeType.data_changed)
def _category_protection_changed(sender, obj, mode, old_mode, **kwargs):
- parent_mode = obj.protection_parent.effective_protection_mode
+ parent_mode = obj.protection_parent.effective_protection_mode if obj.protection_parent else None
if ((old_mode == ProtectionMode.inheriting and parent_mode == mode) or
(old_mode == parent_mode and mode == ProtectionMode.inheriting)):
return
@@ -142,15 +166,29 @@ def _acl_entry_changed(sender, obj, entry, old_data, **kwargs):
_register_change(obj, ChangeType.protection_changed)
-def _note_changed(note, **kwargs):
- obj = note.event if isinstance(note.object, Session) else note.object
- _register_change(obj, ChangeType.data_changed)
+def _attachment_folder_deleted(folder, **kwargs):
+ if folder.link_type not in (LinkType.event, LinkType.contribution, LinkType.subcontribution):
+ return
+ for attachment in folder.attachments:
+ _register_deletion(attachment)
-def _attachment_changed(attachment_or_folder, **kwargs):
- folder = getattr(attachment_or_folder, 'folder', attachment_or_folder)
- if not isinstance(folder.object, Category) and not isinstance(folder.object, Session):
- _register_change(folder.object.event, ChangeType.data_changed)
+def _attachment_folder_protection_changed(sender, obj, **kwargs):
+ if not inspect(obj).persistent:
+ return
+ if obj.link_type not in (LinkType.event, LinkType.contribution, LinkType.subcontribution):
+ return
+ for attachment in obj.attachments:
+ _register_change(attachment, ChangeType.protection_changed)
+
+
+def _attachment_folder_acl_entry_changed(sender, obj, entry, old_data, **kwargs):
+ if not inspect(obj).persistent:
+ return
+ if obj.link_type not in (LinkType.event, LinkType.contribution, LinkType.subcontribution):
+ return
+ for attachment in obj.attachments:
+ _acl_entry_changed(type(attachment), attachment, entry, old_data)
def _apply_changes(sender, **kwargs):
@@ -168,7 +206,7 @@ def _register_deletion(obj):
def _register_change(obj, action):
if not isinstance(obj, Category):
- event = obj.event
+ event = obj.folder.event if isinstance(obj, Attachment) else obj.event
if event is None or event.is_deleted:
# When deleting an event we get data change signals afterwards. We can simple ignore them.
# Also, ACL changes during user merges might involve deleted objects which we also don't care about
diff --git a/livesync/indico_livesync/initial.py b/livesync/indico_livesync/initial.py
index d540822..bb73c85 100644
--- a/livesync/indico_livesync/initial.py
+++ b/livesync/indico_livesync/initial.py
@@ -21,6 +21,8 @@ from indico.modules.events.sessions import Session
from indico.modules.events.sessions.models.blocks import SessionBlock
from indico.modules.events.sessions.models.principals import SessionPrincipal
+from indico_livesync.util import get_excluded_categories
+
def apply_acl_entry_strategy(rel, principal):
user_strategy = rel.joinedload('user')
@@ -38,13 +40,20 @@ def apply_acl_entry_strategy(rel, principal):
return rel
+def _get_excluded_category_filter(event_model=Event):
+ if excluded_category_ids := get_excluded_categories():
+ return event_model.category_id.notin_(excluded_category_ids)
+ return True
+
+
def query_events():
return (
Event.query
.filter_by(is_deleted=False)
+ .filter(_get_excluded_category_filter())
.options(
apply_acl_entry_strategy(selectinload(Event.acl_entries), EventPrincipal),
- selectinload(Event.person_links),
+ selectinload(Event.person_links).joinedload('person').joinedload('user').load_only('is_system'),
joinedload(Event.own_venue),
joinedload(Event.own_room).options(raiseload('*'), joinedload('location')),
)
@@ -73,10 +82,10 @@ def query_contributions():
return (
Contribution.query
.join(Event)
- .filter(~Contribution.is_deleted, ~Event.is_deleted)
+ .filter(~Contribution.is_deleted, ~Event.is_deleted, _get_excluded_category_filter())
.options(
selectinload(Contribution.acl_entries),
- selectinload(Contribution.person_links),
+ selectinload(Contribution.person_links).joinedload('person').joinedload('user').load_only('is_system'),
event_strategy,
session_strategy,
session_block_strategy,
@@ -97,6 +106,7 @@ def query_subcontributions():
contrib_strategy = contains_eager(SubContribution.contribution)
contrib_strategy.joinedload(Contribution.own_venue)
contrib_strategy.joinedload(Contribution.own_room).options(raiseload('*'), joinedload('location'))
+ contrib_strategy.joinedload(Contribution.timetable_entry)
apply_acl_entry_strategy(contrib_strategy.selectinload(Contribution.acl_entries), ContributionPrincipal)
event_strategy = contrib_strategy.contains_eager(Contribution.event.of_type(contrib_event))
@@ -119,9 +129,10 @@ def query_subcontributions():
.join(Contribution.event.of_type(contrib_event))
.outerjoin(Contribution.session.of_type(contrib_session))
.outerjoin(Contribution.session_block.of_type(contrib_block))
- .filter(~SubContribution.is_deleted, ~Contribution.is_deleted, ~contrib_event.is_deleted)
+ .filter(~SubContribution.is_deleted, ~Contribution.is_deleted, ~contrib_event.is_deleted,
+ _get_excluded_category_filter(contrib_event))
.options(
- selectinload(SubContribution.person_links),
+ selectinload(SubContribution.person_links).joinedload('person').joinedload('user').load_only('is_system'),
contrib_strategy,
event_strategy,
session_strategy,
@@ -190,19 +201,22 @@ def query_attachments():
.filter(AttachmentFolder.link_type != LinkType.category)
.filter(db.or_(
AttachmentFolder.link_type != LinkType.event,
- ~Event.is_deleted
+ ~Event.is_deleted & _get_excluded_category_filter(),
))
.filter(db.or_(
AttachmentFolder.link_type != LinkType.contribution,
- ~Contribution.is_deleted & ~contrib_event.is_deleted
+ ~Contribution.is_deleted & ~contrib_event.is_deleted & _get_excluded_category_filter(contrib_event)
))
.filter(db.or_(
AttachmentFolder.link_type != LinkType.subcontribution,
- ~SubContribution.is_deleted & ~subcontrib_contrib.is_deleted & ~subcontrib_event.is_deleted
+ db.and_(~SubContribution.is_deleted,
+ ~subcontrib_contrib.is_deleted,
+ ~subcontrib_event.is_deleted,
+ _get_excluded_category_filter(subcontrib_event))
))
.filter(db.or_(
AttachmentFolder.link_type != LinkType.session,
- ~Session.is_deleted & ~session_event.is_deleted
+ ~Session.is_deleted & ~session_event.is_deleted & _get_excluded_category_filter(session_event)
))
.order_by(Attachment.id)
)
@@ -261,23 +275,26 @@ def query_notes():
.filter(~EventNote.is_deleted)
.filter(db.or_(
EventNote.link_type != LinkType.event,
- ~Event.is_deleted
+ ~Event.is_deleted & _get_excluded_category_filter()
))
.filter(db.or_(
EventNote.link_type != LinkType.contribution,
- ~Contribution.is_deleted & ~contrib_event.is_deleted
+ ~Contribution.is_deleted & ~contrib_event.is_deleted & _get_excluded_category_filter(contrib_event)
))
.filter(db.or_(
EventNote.link_type != LinkType.subcontribution,
- ~SubContribution.is_deleted & ~subcontrib_contrib.is_deleted & ~subcontrib_event.is_deleted
+ db.and_(~SubContribution.is_deleted,
+ ~subcontrib_contrib.is_deleted,
+ ~subcontrib_event.is_deleted,
+ _get_excluded_category_filter(subcontrib_event))
))
.filter(db.or_(
EventNote.link_type != LinkType.session,
- ~Session.is_deleted & ~session_event.is_deleted
+ ~Session.is_deleted & ~session_event.is_deleted & _get_excluded_category_filter(session_event)
))
.options(
note_strategy,
- joinedload(EventNote.current_revision).raiseload(EventNoteRevision.user),
+ joinedload(EventNote.current_revision).joinedload(EventNoteRevision.user).joinedload('_affiliation'),
)
.order_by(EventNote.id)
)
diff --git a/livesync/indico_livesync/migrations/20201023_1224_6ef9616e57cb_add_note_id_and_update_constraints.py b/livesync/indico_livesync/migrations/20201023_1224_6ef9616e57cb_add_note_id_and_update_constraints.py
new file mode 100644
index 0000000..756845b
--- /dev/null
+++ b/livesync/indico_livesync/migrations/20201023_1224_6ef9616e57cb_add_note_id_and_update_constraints.py
@@ -0,0 +1,60 @@
+"""Add note_id and update constraints
+
+Revision ID: 6ef9616e57cb
+Revises: aa0dbc6c14aa
+Create Date: 2020-10-23 12:24:51.648130
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '6ef9616e57cb'
+down_revision = 'aa0dbc6c14aa'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.add_column('queues', sa.Column('note_id', sa.Integer(), nullable=True, index=True), schema='plugin_livesync')
+ op.create_foreign_key(None, 'queues', 'notes', ['note_id'], ['id'], source_schema='plugin_livesync',
+ referent_schema='events')
+ op.drop_constraint('ck_queues_valid_enum_type', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_category_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_event_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_contribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_subcontribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_session_entry', 'queues', schema='plugin_livesync')
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_enum_type CHECK ((type = ANY (ARRAY[1, 2, 3, 4, 5, 6])));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_category_entry CHECK (((type <> 1) OR ((contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (category_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_event_entry CHECK (((type <> 2) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (event_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_contribution_entry CHECK (((type <> 3) OR ((category_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (contribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_subcontribution_entry CHECK (((type <> 4) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_session_entry CHECK (((type <> 5) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (subcontribution_id IS NULL) AND (session_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_note_entry CHECK (((type <> 6) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (note_id IS NOT NULL))));
+ ''')
+
+
+def downgrade():
+ op.execute('DELETE FROM plugin_livesync.queues WHERE type = 6')
+
+ op.drop_constraint('ck_queues_valid_enum_type', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_category_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_event_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_contribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_subcontribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_session_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_note_entry', 'queues', schema='plugin_livesync')
+
+ op.drop_column('queues', 'note_id', schema='plugin_livesync')
+
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_enum_type CHECK ((type = ANY (ARRAY[1, 2, 3, 4, 5])));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_category_entry CHECK (((type <> 1) OR ((contribution_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (category_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_event_entry CHECK (((type <> 2) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (event_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_contribution_entry CHECK (((type <> 3) OR ((category_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (contribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_subcontribution_entry CHECK (((type <> 4) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_session_entry CHECK (((type <> 5) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (subcontribution_id IS NULL) AND (session_id IS NOT NULL))));
+ ''')
diff --git a/livesync/indico_livesync/migrations/20210427_1359_d8e65cb6160d_add_attachment_id_to_queue.py b/livesync/indico_livesync/migrations/20210427_1359_d8e65cb6160d_add_attachment_id_to_queue.py
new file mode 100644
index 0000000..dd1fce9
--- /dev/null
+++ b/livesync/indico_livesync/migrations/20210427_1359_d8e65cb6160d_add_attachment_id_to_queue.py
@@ -0,0 +1,64 @@
+"""Add attachment_id to queue
+
+Revision ID: d8e65cb6160d
+Revises: 6ef9616e57cb
+Create Date: 2021-04-27 13:59:11.538263
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = 'd8e65cb6160d'
+down_revision = '6ef9616e57cb'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.add_column('queues', sa.Column('attachment_id', sa.Integer(), nullable=True, index=True), schema='plugin_livesync')
+ op.create_foreign_key(None, 'queues', 'attachments', ['attachment_id'], ['id'], source_schema='plugin_livesync',
+ referent_schema='attachments')
+ op.drop_constraint('ck_queues_valid_enum_type', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_category_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_event_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_contribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_subcontribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_session_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_note_entry', 'queues', schema='plugin_livesync')
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_enum_type CHECK ((type = ANY (ARRAY[1, 2, 3, 4, 5, 6, 7])));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_attachment_entry CHECK (((type <> 7) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (attachment_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_category_entry CHECK (((type <> 1) OR ((attachment_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (category_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_contribution_entry CHECK (((type <> 3) OR ((attachment_id IS NULL) AND (category_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (contribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_event_entry CHECK (((type <> 2) OR ((attachment_id IS NULL) AND (category_id IS NULL) AND (contribution_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (event_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_note_entry CHECK (((type <> 6) OR ((attachment_id IS NULL) AND (category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (note_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_session_entry CHECK (((type <> 5) OR ((attachment_id IS NULL) AND (category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (subcontribution_id IS NULL) AND (session_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_subcontribution_entry CHECK (((type <> 4) OR ((attachment_id IS NULL) AND (category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NOT NULL))));
+ ''')
+
+
+def downgrade():
+ op.execute('DELETE FROM plugin_livesync.queues WHERE type = 7')
+
+ op.drop_constraint('ck_queues_valid_enum_type', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_category_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_event_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_contribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_subcontribution_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_session_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_note_entry', 'queues', schema='plugin_livesync')
+ op.drop_constraint('ck_queues_valid_attachment_entry', 'queues', schema='plugin_livesync')
+
+ op.drop_column('queues', 'attachment_id', schema='plugin_livesync')
+
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_enum_type CHECK ((type = ANY (ARRAY[1, 2, 3, 4, 5, 6])));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_category_entry CHECK (((type <> 1) OR ((contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (category_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_contribution_entry CHECK (((type <> 3) OR ((category_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (contribution_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_event_entry CHECK (((type <> 2) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (event_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_note_entry CHECK (((type <> 6) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NULL) AND (note_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_session_entry CHECK (((type <> 5) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (subcontribution_id IS NULL) AND (session_id IS NOT NULL))));
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT ck_queues_valid_subcontribution_entry CHECK (((type <> 4) OR ((category_id IS NULL) AND (contribution_id IS NULL) AND (event_id IS NULL) AND (note_id IS NULL) AND (session_id IS NULL) AND (subcontribution_id IS NOT NULL))));
+ ''')
diff --git a/livesync/indico_livesync/migrations/20210506_1917_02a78555cdcb_add_location_changed_change_type.py b/livesync/indico_livesync/migrations/20210506_1917_02a78555cdcb_add_location_changed_change_type.py
new file mode 100644
index 0000000..720f478
--- /dev/null
+++ b/livesync/indico_livesync/migrations/20210506_1917_02a78555cdcb_add_location_changed_change_type.py
@@ -0,0 +1,30 @@
+"""Add location_changed change type
+
+Revision ID: 02a78555cdcb
+Revises: d8e65cb6160d
+Create Date: 2021-05-06 19:17:41.256096
+"""
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '02a78555cdcb'
+down_revision = 'd8e65cb6160d'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues DROP CONSTRAINT "ck_queues_valid_enum_change";
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT "ck_queues_valid_enum_change" CHECK ((change = ANY (ARRAY[1, 2, 3, 4, 5, 6])));
+ ''')
+
+
+def downgrade():
+ op.execute('DELETE FROM plugin_livesync.queues WHERE change = 6')
+ op.execute('''
+ ALTER TABLE plugin_livesync.queues DROP CONSTRAINT "ck_queues_valid_enum_change";
+ ALTER TABLE plugin_livesync.queues ADD CONSTRAINT "ck_queues_valid_enum_change" CHECK ((change = ANY (ARRAY[1, 2, 3, 4, 5])));
+ ''')
diff --git a/livesync/indico_livesync/models/queue.py b/livesync/indico_livesync/models/queue.py
index 5bedade..707bfdf 100644
--- a/livesync/indico_livesync/models/queue.py
+++ b/livesync/indico_livesync/models/queue.py
@@ -6,11 +6,10 @@
# see the LICENSE file for more details.
from flask import g
-from werkzeug.datastructures import ImmutableDict
from indico.core.db.sqlalchemy import PyIntEnum, UTCDateTime, db
+from indico.modules.attachments.models.attachments import Attachment
from indico.modules.categories.models.categories import Category
-from indico.modules.events.models.events import Event
from indico.util.date_time import now_utc
from indico.util.enum import IndicoEnum
from indico.util.string import format_repr
@@ -25,6 +24,7 @@ class ChangeType(int, IndicoEnum):
moved = 3
data_changed = 4
protection_changed = 5
+ location_changed = 6
class EntryType(int, IndicoEnum):
@@ -33,6 +33,8 @@ class EntryType(int, IndicoEnum):
contribution = 3
subcontribution = 4
session = 5
+ note = 6
+ attachment = 7
_column_for_types = {
@@ -40,7 +42,9 @@ _column_for_types = {
EntryType.event: 'event_id',
EntryType.contribution: 'contribution_id',
EntryType.subcontribution: 'subcontribution_id',
- EntryType.session: 'session_id'
+ EntryType.session: 'session_id',
+ EntryType.note: 'note_id',
+ EntryType.attachment: 'attachment_id',
}
@@ -142,6 +146,24 @@ class LiveSyncQueueEntry(db.Model):
nullable=True
)
+ #: ID of the changed note
+ note_id = db.Column(
+ 'note_id',
+ db.Integer,
+ db.ForeignKey('events.notes.id'),
+ index=True,
+ nullable=True
+ )
+
+ #: ID of the changed attachment
+ attachment_id = db.Column(
+ 'attachment_id',
+ db.Integer,
+ db.ForeignKey('attachments.attachments.id'),
+ index=True,
+ nullable=True
+ )
+
#: The associated :class:LiveSyncAgent
agent = db.relationship(
'LiveSyncAgent',
@@ -198,6 +220,26 @@ class LiveSyncQueueEntry(db.Model):
)
)
+ note = db.relationship(
+ 'EventNote',
+ lazy=False,
+ backref=db.backref(
+ 'livesync_queue_entries',
+ cascade='all, delete-orphan',
+ lazy='dynamic'
+ )
+ )
+
+ attachment = db.relationship(
+ 'Attachment',
+ lazy=False,
+ backref=db.backref(
+ 'livesync_queue_entries',
+ cascade='all, delete-orphan',
+ lazy='dynamic'
+ )
+ )
+
@property
def object(self):
"""Return the changed object."""
@@ -211,16 +253,15 @@ class LiveSyncQueueEntry(db.Model):
return self.contribution
elif self.type == EntryType.subcontribution:
return self.subcontribution
-
- @property
- def object_ref(self):
- """Return the reference of the changed object."""
- return ImmutableDict(type=self.type, category_id=self.category_id, event_id=self.event_id,
- session_id=self.session_id, contrib_id=self.contrib_id, subcontrib_id=self.subcontrib_id)
+ elif self.type == EntryType.note:
+ return self.note
+ elif self.type == EntryType.attachment:
+ return self.attachment
def __repr__(self):
return format_repr(self, 'id', 'agent_id', 'change', 'type',
- category_id=None, event_id=None, session_id=None, contrib_id=None, subcontrib_id=None)
+ category_id=None, event_id=None, session_id=None, contrib_id=None, subcontrib_id=None,
+ note_id=None, attachment_id=None)
@classmethod
def create(cls, changes, ref, excluded_categories=set()):
@@ -240,7 +281,7 @@ class LiveSyncQueueEntry(db.Model):
if any(c.id in excluded_categories for c in obj.chain_query):
return
else:
- event = obj if isinstance(obj, Event) else obj.event
+ event = obj.folder.event if isinstance(obj, Attachment) else obj.event
if event.category not in g.setdefault('livesync_excluded_categories_checked', {}):
g.livesync_excluded_categories_checked[event.category] = excluded_categories & set(event.category_chain)
if g.livesync_excluded_categories_checked[event.category]:
diff --git a/livesync/indico_livesync/plugin.py b/livesync/indico_livesync/plugin.py
index 394eea5..b2b8e82 100644
--- a/livesync/indico_livesync/plugin.py
+++ b/livesync/indico_livesync/plugin.py
@@ -5,6 +5,7 @@
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
+from wtforms.fields.core import BooleanField
from wtforms.fields.html5 import IntegerField
from wtforms.validators import NumberRange
@@ -12,6 +13,7 @@ from indico.core import signals
from indico.core.plugins import IndicoPlugin, PluginCategory
from indico.web.forms.base import IndicoForm
from indico.web.forms.fields import MultipleItemsField
+from indico.web.forms.widgets import SwitchWidget
from indico_livesync import _
from indico_livesync.blueprint import blueprint
@@ -31,6 +33,8 @@ class SettingsForm(IndicoForm):
fields=[{'id': 'id', 'caption': _("Category ID"), 'required': True}],
description=_("Changes to objects inside these categories or any of their "
"subcategories are excluded."))
+ disable_queue_runs = BooleanField(_('Disable queue runs'), widget=SwitchWidget(),
+ description=_('Disable all scheduled queue runs.'))
class LiveSyncPlugin(IndicoPlugin):
@@ -42,7 +46,8 @@ class LiveSyncPlugin(IndicoPlugin):
configurable = True
settings_form = SettingsForm
default_settings = {'excluded_categories': [],
- 'queue_entry_ttl': 0}
+ 'queue_entry_ttl': 0,
+ 'disable_queue_runs': False}
category = PluginCategory.synchronization
def init(self):
diff --git a/livesync/indico_livesync/simplify.py b/livesync/indico_livesync/simplify.py
index 61dca60..4a9241e 100644
--- a/livesync/indico_livesync/simplify.py
+++ b/livesync/indico_livesync/simplify.py
@@ -11,10 +11,14 @@ from collections import defaultdict
from sqlalchemy.orm import joinedload
from indico.core.db import db
+from indico.modules.attachments.models.attachments import Attachment
+from indico.modules.attachments.models.folders import AttachmentFolder
from indico.modules.categories.models.categories import Category
from indico.modules.events.contributions.models.contributions import Contribution
from indico.modules.events.contributions.models.subcontributions import SubContribution
from indico.modules.events.models.events import Event
+from indico.modules.events.notes.models.notes import EventNote
+from indico.modules.events.sessions import Session
from indico.util.enum import IndicoEnum
from indico_livesync.models.queue import ChangeType, EntryType
@@ -26,6 +30,9 @@ class SimpleChange(int, IndicoEnum):
updated = 4
+CREATED_DELETED = SimpleChange.created | SimpleChange.deleted
+
+
def process_records(records):
"""Converts queue entries into object changes.
@@ -33,8 +40,10 @@ def process_records(records):
:return: a dict mapping object references to `SimpleChange` bitsets
"""
changes = defaultdict(int)
+ cascaded_create_records = set()
cascaded_update_records = set()
cascaded_delete_records = set()
+ cascaded_location_changes = set()
for record in records:
if record.change != ChangeType.deleted and record.object is None:
@@ -43,7 +52,7 @@ def process_records(records):
continue
if record.change == ChangeType.created:
assert record.type != EntryType.category
- changes[record.object] |= SimpleChange.created
+ cascaded_create_records.add(record)
elif record.change == ChangeType.deleted:
assert record.type != EntryType.category
cascaded_delete_records.add(record)
@@ -52,6 +61,14 @@ def process_records(records):
elif record.change == ChangeType.data_changed:
assert record.type != EntryType.category
changes[record.object] |= SimpleChange.updated
+ # subcontributions have their parent's time information, so we need to
+ # cascade contribution updates to them
+ if record.type == EntryType.contribution:
+ for subcontrib in record.object.subcontributions:
+ changes[subcontrib] |= SimpleChange.updated
+ elif record.change == ChangeType.location_changed:
+ assert record.type in (EntryType.event, EntryType.contribution, EntryType.session)
+ cascaded_location_changes.add(record)
for obj in _process_cascaded_category_contents(cascaded_update_records):
changes[obj] |= SimpleChange.updated
@@ -59,6 +76,17 @@ def process_records(records):
for obj in _process_cascaded_event_contents(cascaded_delete_records):
changes[obj] |= SimpleChange.deleted
+ for obj in _process_cascaded_event_contents(cascaded_create_records):
+ changes[obj] |= SimpleChange.created
+
+ for obj in _process_cascaded_locations(cascaded_location_changes):
+ changes[obj] |= SimpleChange.updated
+
+ created_and_deleted = {obj for obj, flags in changes.items() if (flags & CREATED_DELETED) == CREATED_DELETED}
+ for obj in created_and_deleted:
+ # discard any change where the object was both created and deleted
+ del changes[obj]
+
return changes
@@ -110,37 +138,120 @@ def _process_cascaded_event_contents(records, additional_events=None):
found in records
"""
changed_events = additional_events or set()
+ changed_sessions = set()
changed_contributions = set()
changed_subcontributions = set()
+ changed_attachments = set()
+ changed_notes = set()
+ note_records = {rec.note_id for rec in records if rec.type == EntryType.note}
+ attachment_records = {rec.attachment_id for rec in records if rec.type == EntryType.attachment}
session_records = {rec.session_id for rec in records if rec.type == EntryType.session}
contribution_records = {rec.contrib_id for rec in records if rec.type == EntryType.contribution}
subcontribution_records = {rec.subcontrib_id for rec in records if rec.type == EntryType.subcontribution}
event_records = {rec.event_id for rec in records if rec.type == EntryType.event}
+ if attachment_records:
+ changed_attachments.update(Attachment.query.filter(Attachment.id.in_(attachment_records)))
+
+ if note_records:
+ changed_notes.update(EventNote.query.filter(EventNote.id.in_(note_records)))
+
if event_records:
changed_events.update(Event.query.filter(Event.id.in_(event_records)))
- yield from changed_events
-
- # Sessions are added (explicitly changed only, since they don't need to be sent anywhere)
- if session_records:
- changed_contributions.update(Contribution.query
- .filter(Contribution.session_id.in_(session_records), ~Contribution.is_deleted))
-
- # Contributions are added (implictly + explicitly changed)
changed_event_ids = {ev.id for ev in changed_events}
- condition = Contribution.event_id.in_(changed_event_ids) & ~Contribution.is_deleted
- if contribution_records:
- condition = db.or_(condition, Contribution.id.in_(contribution_records))
- contrib_query = Contribution.query.filter(condition).options(joinedload('subcontributions'))
+ if changed_event_ids:
+ changed_attachments.update(
+ Attachment.query.filter(
+ Attachment.folder.has(AttachmentFolder.linked_event_id.in_(changed_event_ids))
+ )
+ )
+ changed_notes.update(EventNote.query.filter(EventNote.linked_event_id.in_(changed_event_ids)))
- for contribution in contrib_query:
+ yield from changed_events
+
+    # Sessions are added (implicitly + explicitly changed)
+    if changed_event_ids or session_records:
+        condition = Session.event_id.in_(changed_event_ids) & ~Session.is_deleted
+        if session_records:
+            condition = db.or_(condition, Session.id.in_(session_records))
+        changed_sessions.update(Session.query.filter(condition))
+
+ if changed_sessions:
+ # XXX I kept this very similar to the structure of the code for contributions below,
+ # but why aren't we just merging this into the block right above?!
+ changed_session_ids = {s.id for s in changed_sessions}
+ changed_contributions.update(Contribution.query
+ .filter(Contribution.session_id.in_(changed_session_ids),
+ ~Contribution.is_deleted))
+ changed_attachments.update(
+ Attachment.query.filter(
+ ~Attachment.is_deleted,
+ Attachment.folder.has(db.and_(AttachmentFolder.session_id.in_(changed_session_ids),
+ ~AttachmentFolder.is_deleted))
+ )
+ )
+ changed_notes.update(EventNote.query.filter(EventNote.session_id.in_(changed_session_ids),
+ ~EventNote.is_deleted))
+
+    # Contributions are added (implicitly + explicitly changed)
+ if changed_event_ids or contribution_records:
+ condition = Contribution.event_id.in_(changed_event_ids) & ~Contribution.is_deleted
+ if contribution_records:
+ condition = db.or_(condition, Contribution.id.in_(contribution_records))
+ changed_contributions.update(Contribution.query.filter(condition).options(joinedload('subcontributions')))
+
+ for contribution in changed_contributions:
yield contribution
changed_subcontributions.update(contribution.subcontributions)
+ if changed_contributions:
+ changed_contribution_ids = {c.id for c in changed_contributions}
+ changed_attachments.update(
+ Attachment.query.filter(
+ ~Attachment.is_deleted,
+ Attachment.folder.has(db.and_(AttachmentFolder.contribution_id.in_(changed_contribution_ids),
+ ~AttachmentFolder.is_deleted))
+ )
+ )
+ changed_notes.update(EventNote.query.filter(EventNote.contribution_id.in_(changed_contribution_ids),
+ ~EventNote.is_deleted))
+
# Same for subcontributions
if subcontribution_records:
changed_subcontributions.update(SubContribution.query.filter(SubContribution.id.in_(subcontribution_records)))
+
+ if changed_subcontributions:
+ changed_subcontribution_ids = {sc.id for sc in changed_subcontributions}
+ changed_attachments.update(
+ Attachment.query.filter(
+ ~Attachment.is_deleted,
+ Attachment.folder.has(db.and_(AttachmentFolder.subcontribution_id.in_(changed_subcontribution_ids),
+ ~AttachmentFolder.is_deleted))
+ )
+ )
+ changed_notes.update(EventNote.query.filter(EventNote.subcontribution_id.in_(changed_subcontribution_ids),
+ ~EventNote.is_deleted))
+
yield from changed_subcontributions
+ yield from changed_attachments
+ yield from changed_notes
+
+
+def _process_cascaded_locations(records):
+ contributions = {rec.contribution for rec in records if rec.type == EntryType.contribution}
+ events = {rec.event for rec in records if rec.type == EntryType.event}
+ event_ids = {e.id for e in events}
+ session_ids = {rec.session_id for rec in records if rec.type == EntryType.session}
+
+ # location of the event changed
+ yield from events
+ # location of the contribution changed
+ yield from contributions
+ # location of contributions inside an event may be inherited
+ # we don't check the inheritance since we're lazy and the chain is non-trivial
+ yield from Contribution.query.filter(Contribution.event_id.in_(event_ids), ~Contribution.is_deleted)
+ # location of a contribution inside a session may be inherited as well
+ yield from Contribution.query.filter(Contribution.session_id.in_(session_ids), ~Contribution.is_deleted)
diff --git a/livesync/indico_livesync/task.py b/livesync/indico_livesync/task.py
index 7a2f254..a68cfab 100644
--- a/livesync/indico_livesync/task.py
+++ b/livesync/indico_livesync/task.py
@@ -17,14 +17,19 @@ from indico_livesync.util import clean_old_entries
@celery.periodic_task(run_every=crontab(minute='*/15'), plugin='livesync')
def scheduled_update():
from indico_livesync.plugin import LiveSyncPlugin
+ if LiveSyncPlugin.settings.get('disable_queue_runs'):
+ LiveSyncPlugin.logger.warning('Queue runs are disabled')
+ return
clean_old_entries()
for agent in LiveSyncAgent.query.all():
if agent.backend is None:
LiveSyncPlugin.logger.warning('Skipping agent %s; backend not found', agent.name)
continue
- if not agent.initial_data_exported:
- LiveSyncPlugin.logger.warning('Skipping agent %s; initial export not performed yet', agent.name)
+ backend = agent.create_backend()
+ queue_allowed, reason = backend.check_queue_status()
+ if not queue_allowed:
+ LiveSyncPlugin.logger.warning('Skipping agent %s; queue runs disabled: %s', agent.name, reason)
continue
LiveSyncPlugin.logger.info('Running agent %s', agent.name)
- agent.create_backend().run()
+ backend.run()
db.session.commit()
diff --git a/livesync/indico_livesync/templates/plugin_details_extra.html b/livesync/indico_livesync/templates/plugin_details_extra.html
index 307cfed..78f85f0 100644
--- a/livesync/indico_livesync/templates/plugin_details_extra.html
+++ b/livesync/indico_livesync/templates/plugin_details_extra.html
@@ -26,8 +26,8 @@
| {% trans %}Name{% endtrans %} |
{% trans %}Backend{% endtrans %} |
{% trans %}Last Run{% endtrans %} |
- {% trans %}Initial Export{% endtrans %} |
{% trans %}Queue{% endtrans %} |
+ {% trans %}Status{% endtrans %} |
{% trans %}Actions{% endtrans %} |
@@ -50,14 +50,19 @@
{% trans %}Never{% endtrans %}
{%- endif -%}
+ {{ agent.queue.filter_by(processed=false).count() }} |
- {% if agent.initial_data_exported %}
- {% trans %}Done{% endtrans %}
+ {% if agent.backend %}
+ {% set queue_ready, reason = agent.create_backend().check_queue_status() %}
+ {% if queue_ready %}
+ {% trans %}Ready{% endtrans %}
+ {% else %}
+ {{ reason }}
+ {% endif %}
{% else %}
- {% trans %}Pending{% endtrans %}
+ {% trans %}n/a{% endtrans %}
{% endif %}
|
- {{ agent.queue.filter_by(processed=false).count() }} |
|