
Wrote some more content, most of the base pipelines

Branch: main
Author: Artemis, 4 weeks ago
Parent commit: d3cfed0c95

Changed files:
  blep/core.py       (35 lines changed)
  blep/pipelines.py  (61 lines changed)
  setup.py           (4 lines changed)

blep/core.py  (35 lines changed)

@@ -3,16 +3,25 @@ from functools import reduce
 from pathlib import Path, PurePath
 from queue import Queue, Empty
 from shutil import rmtree
-from typing import Union
+from typing import Union, Callable
 
 # Applying typing definition from aiohttp
 if sys.version_info >= (3, 6):
     PathLike = Union[str, "os.PathLike[str]"]
 else:
     PathLike = Union[str, PurePath]
 
-# Other custom typings
-Patterns = dict[str, str]
-Pipelines = dict[str, list]
+
+# deep-merge two dicts
+def dict_deep_merge(source, destination):
+    for key, value in source.items():
+        if isinstance(value, dict):
+            node = destination.setdefault(key, {})
+            dict_deep_merge(value, node)
+        else:
+            destination[key] = value
+    return destination
 
 
 class File:
@@ -30,11 +39,23 @@ class File:
         self.raw = raw
 
 
+# Pre-declaration to avoid cyclic type dependency
+class Website:
+    pass
+
+
+# Other custom typings
+Patterns = dict[str, str]
+Action = Callable[[Website, list[File]], list[File]]
+Pipeline = list[Action]
+Pipelines = dict[str, Pipeline]
+
+
 class Job:
     files: list[File]
-    pipeline: list
+    pipeline: Pipeline
 
-    def __init__(self, files: list[File], pipeline: list):
+    def __init__(self, files: list[File], pipeline: Pipeline):
         self.files = files
         self.pipeline = pipeline
@@ -86,7 +107,7 @@ class Website:
         for job in jobs:
             self.job_queue.put(job)
 
-    def execute_pipeline(self, pipeline: list, files: list[File]):
+    def execute_pipeline(self, pipeline: Pipeline, files: list[File]):
         reduce(lambda f, action: action(self, f), pipeline, files)
 
     def prepare(self):
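
The new aliases make the pipeline contract explicit: an Action takes the Website and the current list of File objects and returns a new list, a Pipeline is just a list of such actions, and execute_pipeline folds the files through them with reduce. A minimal sketch of how the pieces compose (hypothetical usage, not part of the commit; it assumes an already-constructed Website instance `web` and a populated `files` list, neither of which appears in this diff):

# Hypothetical usage of the aliases added above; `web` and `files` are
# assumed to exist, since their construction isn't part of this diff.
from blep.core import dict_deep_merge
from blep.pipelines import load, save

# dict_deep_merge recurses into nested dicts, mutating and returning
# `destination`; scalar values from `source` win on conflict:
defaults = {'site': {'title': 'blep', 'lang': 'en'}}
page_meta = {'site': {'lang': 'fr'}, 'draft': True}
merged = dict_deep_merge(page_meta, defaults)
# merged == {'site': {'title': 'blep', 'lang': 'fr'}, 'draft': True}

# A Pipeline is a plain list of Actions, folded over the files in order:
pipeline = [load, save]  # Pipeline = list[Action]
web.execute_pipeline(pipeline, files)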

blep/pipelines.py  (61 lines changed)

@@ -1,13 +1,70 @@
 # Pre-defined pipelines
+from pathlib import Path
 from shutil import copy as copy_file
 
-from .core import File, Website
+from .core import File, Website, dict_deep_merge
 
 
 # Binary files handling
 
-def copy(web: Website, files: list[File]):
+# Copy file in-place without any renaming
+def copy(web: Website, files: list[File]) -> list[File]:
     for file in files:
         origin = web.input_path / file.path
         target = web.output_path / file.path
 
         if not target.parent.is_dir():
             target.parent.mkdir(parents=True)
 
         copy_file(origin, target)
+    return files
+
+
+# Load file's textual UTF-8 content into the file's raw buffer
+def load(web: Website, files: list[File]) -> list[File]:
+    def read_file(p: Path):
+        with open(p, 'r') as f:
+            return f.read()
+
+    return [File(f.path, f.kind, raw=read_file(web.input_path / f.path)) for f in files]
+
+
+# Write the file's raw buffer, without renaming
+def save(web: Website, files: list[File]) -> list[File]:
+    for file in files:
+        with open(web.output_path / file.path, 'w+') as f:
+            f.write(file.raw)
+    return files
+
+
+# Text and code processors
+
+# Minify resource depending on the file's extension (supported: HTML, CSS, and JS)
+def minify(_: Website, files: list[File]) -> list[File]:
+    from jsmin import jsmin
+    from lesscpy import compile as cssmin
+    from six import StringIO
+    from htmlmin import minify as htmlmin
+
+    def minify_file(file: File) -> File:
+        content = {
+            '.js': jsmin,
+            '.css': lambda raw: cssmin(StringIO(raw), minify=True),
+            '.html': htmlmin,
+        }[file.path.suffix](file.raw)
+        return File(file.path, file.kind, file.metadata, content)
+
+    return list(map(minify_file, files))
+
+
+# Parse front-matter metadata into the attributes list
+def parse_metadata(_: Website, files: list[File]) -> list[File]:
+    from frontmatter import Frontmatter
+
+    def parse(file: File) -> File:
+        data = Frontmatter.read(file.raw)
+        return File(file.path, file.kind, dict_deep_merge(file.metadata, data['attributes']), data['body'])
+
+    return list(map(parse, files))
+
+
+# Parse Markdown
+
+# Parse Gemini
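
The trailing "# Parse Markdown" and "# Parse Gemini" comments are placeholders in this commit. For illustration only, a Markdown action matching the Action alias from core.py might look like the sketch below (hypothetical, not committed code; it assumes the markdown package and renames the output to .html, which save() will honour since it writes to web.output_path / file.path):

# Hypothetical sketch, not part of this commit: a Markdown-rendering
# Action matching Action = Callable[[Website, list[File]], list[File]].
def markdown(_: Website, files: list[File]) -> list[File]:
    from markdown import markdown as render_md  # lazy import, as in minify()

    def render(file: File) -> File:
        # Render the raw Markdown buffer to HTML and swap the extension,
        # so a later save() writes e.g. posts/hello.html
        return File(file.path.with_suffix('.html'), file.kind,
                    file.metadata, render_md(file.raw))

    return list(map(render, files))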

setup.py  (4 lines changed)

@@ -18,6 +18,10 @@ setup(
     author_email='git@artemix.org',
     packages=find_packages(),
     install_requires=[
+        # Minifiers
+        'lesscpy', 'htmlmin', 'jsmin', 'six',
+        # Parsing
+        'frontmatter',
     ],
     classifiers=[
         'Environment :: Console',