efficient build

- postpone building until really needed
- rebuild only if artifacts change
- no build on source update
- prune takes current resolver state instead of global var
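
In plain terms, these bullets describe a lazy, run-once grouping step: work is queued during the normal build, materialized at most once, and the resolver is re-filled from that run's results instead of from a module-level global. What follows is a minimal, hypothetical sketch of that pattern only; the names (LazyGroups, queue, make_once, build_all) are invented for illustration and are not the plugin's actual API, which appears in the diff below.

    # Hypothetical sketch, not the plugin's real classes or API.
    from typing import Callable, List


    class LazyGroups:
        ''' Queue grouping work now, run it at most once, reuse the result. '''

        def __init__(self) -> None:
            self._pending = []  # type: List[Callable[[], str]]
            self._results = []  # type: List[str]

        def queue(self, job: Callable[[], str]) -> None:
            ''' Remember the work but do not execute it yet. '''
            self._pending.append(job)

        def make_once(self) -> None:
            ''' Run queued jobs exactly once; later calls are no-ops. '''
            if self._pending:
                self._results.clear()   # drop state from a previous run
                self._results.extend(job() for job in self._pending)
                self._pending.clear()   # nothing left to do next time

        def build_all(self) -> List[str]:
            ''' Build from cached results, triggering make_once() if needed. '''
            self.make_once()
            return list(self._results)


    groups = LazyGroups()
    groups.queue(lambda: 'tag/python')
    print(groups.build_all())  # runs the queued job once
    print(groups.build_all())  # reuses the cached result, no re-run

The real make_once in the diff follows the same guard: it resets the resolver, collects results from every watcher, then clears the watcher list so a second call does nothing, and build_all calls make_once first so the grouping still happens even if nothing triggered it earlier.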
relikd committed 2022-04-13 15:41:57 +02:00
parent 8ae5376d41
commit 1d9629566c
8 changed files with 302 additions and 270 deletions


@@ -1,12 +1,14 @@
-from lektor.builder import Builder, PathCache
-from lektor.db import Record  # typing
-from lektor.sourceobj import SourceObject  # typing
-from typing import Set, List
-from .vobj import GroupBySource  # typing
-from .config import Config, AnyConfig
-from .resolver import Resolver  # typing
+from lektor.builder import PathCache
+from lektor.db import Record  # isinstance
+from typing import TYPE_CHECKING, Set, List
+from .config import Config
 from .watcher import Watcher
+if TYPE_CHECKING:
+    from .config import AnyConfig
+    from lektor.builder import Builder
+    from lektor.sourceobj import SourceObject
+    from .resolver import Resolver
+    from .vobj import GroupBySource


 class GroupBy:
@@ -16,11 +18,12 @@ class GroupBy:
     The grouping is performed only once per build.
     '''

-    def __init__(self) -> None:
+    def __init__(self, resolver: 'Resolver') -> None:
         self._watcher = []  # type: List[Watcher]
         self._results = []  # type: List[GroupBySource]
+        self.resolver = resolver

-    def add_watcher(self, key: str, config: AnyConfig) -> Watcher:
+    def add_watcher(self, key: str, config: 'AnyConfig') -> Watcher:
         ''' Init Config and add to watch list. '''
         w = Watcher(Config.from_any(key, config))
         self._watcher.append(w)
@@ -32,8 +35,9 @@ class GroupBy:
             deps.update(w.config.dependencies)
         return deps

-    def queue_all(self, builder: Builder) -> None:
+    def queue_all(self, builder: 'Builder') -> None:
         ''' Iterate full site-tree and queue all children. '''
+        self.dependencies = self.get_dependencies()
         # remove disabled watchers
         self._watcher = [w for w in self._watcher if w.config.enabled]
         if not self._watcher:
@@ -45,30 +49,29 @@ class GroupBy:
         queue = builder.pad.get_all_roots()  # type: List[SourceObject]
         while queue:
             record = queue.pop()
-            self.queue_now(record)
             if hasattr(record, 'attachments'):
                 queue.extend(record.attachments)  # type: ignore[attr-defined]
             if hasattr(record, 'children'):
                 queue.extend(record.children)  # type: ignore[attr-defined]
+            if isinstance(record, Record):
+                for w in self._watcher:
+                    if w.should_process(record):
+                        w.process(record)

-    def queue_now(self, node: SourceObject) -> None:
-        ''' Process record immediatelly (No-Op if already processed). '''
-        if isinstance(node, Record):
-            for w in self._watcher:
-                if w.should_process(node):
-                    w.process(node)
-
-    def make_cluster(self, builder: Builder, resolver: Resolver) -> None:
+    def make_once(self, builder: 'Builder') -> None:
         ''' Perform groupby, iter over sources with watcher callback. '''
-        for w in self._watcher:
-            root = builder.pad.get(w.config.root)
-            for vobj in w.iter_sources(root):
-                self._results.append(vobj)
-                resolver.add(vobj)
-        self._watcher.clear()
+        if self._watcher:
+            self.resolver.reset()
+            for w in self._watcher:
+                root = builder.pad.get(w.config.root)
+                for vobj in w.iter_sources(root):
+                    self._results.append(vobj)
+                    self.resolver.add(vobj)
+            self._watcher.clear()

-    def build_all(self, builder: Builder) -> None:
+    def build_all(self, builder: 'Builder') -> None:
         ''' Create virtual objects and build sources. '''
+        self.make_once(builder)  # in case no page used the |vgroups filter
         path_cache = PathCache(builder.env)
         for vobj in self._results:
             if vobj.slug: