refactor core python package, implement base CLI
monobiome/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from importlib.metadata import version

__version__ = version("monobiome")
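For reference, the same version lookup with a guard for running from an uninstalled source checkout; the fallback string below is illustrative and not part of this commit:

from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version("monobiome")
except PackageNotFoundError:
    # package metadata unavailable, e.g. running straight from a source tree
    __version__ = "0.0.0.dev0"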
@@ -1,5 +1,6 @@
from monobiome.cli import create_parser, configure_logging


def main() -> None:
    parser = create_parser()
    args = parser.parse_args()
@@ -16,4 +17,3 @@ def main() -> None:

if __name__ == "__main__":
    main()
@@ -1,7 +1,7 @@
import argparse
import logging
import argparse

from monobiome.cli import generate, scheme
from monobiome.cli import scheme, palette

logger: logging.Logger = logging.getLogger(__name__)

@@ -26,7 +26,7 @@ def create_parser() -> argparse.ArgumentParser:

    subparsers = parser.add_subparsers(help="subcommand help")

    generate.register_parser(subparsers)
    palette.register_parser(subparsers)
    scheme.register_parser(subparsers)

    return parser
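The registration calls above follow the usual argparse idiom: each subcommand module attaches its handler with set_defaults(func=...), and the entry point dispatches on args.func. A self-contained sketch of that pattern; the "hello" subcommand is illustrative and not part of the package:

import argparse


def register_hello(subparsers) -> None:
    # each subcommand module owns its own parser and handler
    parser = subparsers.add_parser("hello", help="print a greeting")
    parser.add_argument("--name", default="world")
    parser.set_defaults(func=lambda args: print(f"hello, {args.name}"))


def main() -> None:
    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers(help="subcommand help")
    register_hello(subparsers)

    args = parser.parse_args()
    if hasattr(args, "func"):
        args.func(args)        # dispatch to the selected subcommand
    else:
        parser.print_help()


if __name__ == "__main__":
    main()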
@@ -1,31 +0,0 @@
import argparse

def generate_scheme(args: argparse.Namespace) -> None:
    run_from_json(args.parameters_json, args.parameters_file)


def register_parser(subparsers: _SubparserType) -> None:
    parser = subparsers.add_parser(
        "generate",
        help="generate theme variants"
    )

    parser.add_argument(
        "-m",
        "--contrast-method",
        type=str,
        help="Raw JSON string with train parameters",
    )
    parser.add_argument(
        "-c",
        "--contrast-level",
        type=str,
        help="Raw JSON string with train parameters",
    )
    parser.add_argument(
        "-b",
        "-base-lightness",
        type=str,
        help="Minimum lightness level",
    )
    parser.set_defaults(func=generate_scheme)
monobiome/cli/palette.py (new file, 51 lines)
@@ -0,0 +1,51 @@
import argparse
from pathlib import Path

from monobiome.util import _SubparserType
from monobiome.palette import generate_palette


def register_parser(subparsers: _SubparserType) -> None:
    parser = subparsers.add_parser(
        "palette",
        help="generate primary palette"
    )

    parser.add_argument(
        "-n",
        "--notation",
        type=str,
        default="hex",
        choices=["hex", "oklch"],
        help="Color notation to export (either hex or oklch)",
    )
    parser.add_argument(
        "-f",
        "--format",
        type=str,
        default="toml",
        choices=["json", "toml"],
        help="Format of palette file (either JSON or TOML)",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        help="Output file to write palette content",
    )

    parser.set_defaults(func=handle_palette)


def handle_palette(args: argparse.Namespace) -> None:
    notation = args.notation
    file_format = args.format
    output = args.output

    palette_text = generate_palette(notation, file_format)

    if output is None:
        print(palette_text)
    else:
        with Path(output).open("w") as f:
            f.write(palette_text)
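Assuming the handler above, the same code path can be exercised without the parser by passing a Namespace directly; the argument values below mirror the CLI defaults and are illustrative:

import argparse

# hypothetical direct invocation of the handler, bypassing argparse dispatch
args = argparse.Namespace(notation="oklch", format="json", output=None)
handle_palette(args)   # prints the JSON palette to stdout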
@@ -0,0 +1,154 @@
import argparse
from pathlib import Path

from monobiome.util import _SubparserType
from monobiome.scheme import generate_scheme
from monobiome.constants import monotone_h_map


def register_parser(subparsers: _SubparserType) -> None:
    parser = subparsers.add_parser(
        "scheme",
        help="create scheme variants"
    )

    parser.add_argument(
        "mode",
        type=str,
        choices=["dark", "light"],
        help="Scheme mode (light or dark)"
    )
    parser.add_argument(
        "biome",
        type=str,
        choices=list(monotone_h_map.keys()),
        help="Biome setting for scheme."
    )
    parser.add_argument(
        "-m",
        "--metric",
        type=str,
        default="oklch",
        choices=["wcag", "oklch", "lightness"],
        help="Metric to use for measuring swatch distances."
    )

    # e.g., wcag=4.5; oklch=0.40; lightness=40
    parser.add_argument(
        "-d",
        "--distance",
        type=float,
        default=0.40,
        help="Distance threshold for specified metric",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        help="Output file to write scheme content",
    )

    # these params remain rooted in lightness; no need to accommodate metric
    # given these are monotone adjustments. You *could* consider rooting these
    # in metric units, but along monotones, distance=lightness and WCAG isn't a
    # particularly good measure of perceptual distinction, so we'd prefer the
    # former.
    parser.add_argument(
        "--l-base",
        type=int,
        default=20,
        help="Minimum lightness level (default: 20)",
    )
    parser.add_argument(
        "--l-step",
        type=int,
        default=5,
        help="Lightness step size (default: 5)",
    )

    # gaps
    parser.add_argument(
        "--fg-gap",
        type=int,
        default=50,
        help="Foreground lightness gap (default: 50)",
    )
    parser.add_argument(
        "--grey-gap",
        type=int,
        default=30,
        help="Grey lightness gap (default: 30)",
    )
    parser.add_argument(
        "--term-fg-gap",
        type=int,
        default=60,
        help="Terminal foreground lightness gap (default: 60)",
    )

    parser.set_defaults(func=handle_scheme)


def handle_scheme(args: argparse.Namespace) -> None:
    output = args.output

    mode = args.mode
    biome = args.biome
    metric = args.metric
    distance = args.distance
    l_base = args.l_base
    l_step = args.l_step
    fg_gap = args.fg_gap
    grey_gap = args.grey_gap
    term_fg_gap = args.term_fg_gap

    full_color_map = {
        "red": "red",
        "orange": "orange",
        "yellow": "yellow",
        "green": "green",
        "cyan": "cyan",
        "blue": "blue",
        "violet": "violet",
        "magenta": "orange",
    }
    term_color_map = {
        "red": "red",
        "yellow": "yellow",
        "green": "green",
        "cyan": "blue",
        "blue": "blue",
        "magenta": "orange",
    }
    vim_color_map = {
        "red": "red",
        "orange": "orange",
        "yellow": "yellow",
        "green": "green",
        "cyan": "green",
        "blue": "blue",
        "violet": "blue",
        "magenta": "red",
    }
    # vim_color_map = full_color_map

    scheme_text = generate_scheme(
        mode,
        biome,
        metric,
        distance,
        l_base,
        l_step,
        fg_gap,
        grey_gap,
        term_fg_gap,
        full_color_map,
        term_color_map,
        vim_color_map,
    )

    if output is None:
        print(scheme_text)
    else:
        with Path(output).open("w") as f:
            f.write(scheme_text)
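The defaults cited in the comment above (wcag=4.5, oklch=0.40, lightness=40) correspond to three different ways of measuring how far an accent swatch sits from a monotone base. A sketch of the three measures using coloraide; the two swatches are made up for illustration and are not palette values:

from coloraide import Color

base = Color("oklch", [0.20, 0.011, 262])    # a dark monotone-like swatch
accent = Color("oklch", [0.60, 0.12, 29])    # a lighter red-ish accent

wcag = accent.contrast(base, method="wcag21")             # WCAG 2.1 contrast ratio (threshold ~4.5)
oklch = base.distance(accent, space="oklch")              # Euclidean distance in OKLCH (threshold ~0.40)
lightness = abs(base.coords()[0] - accent.coords()[0]) * 100   # lightness gap in percent (threshold ~40)

print(f"{wcag=:.2f} {oklch=:.3f} {lightness=:.1f}")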
@@ -1,157 +1,121 @@
import tomllib
from importlib.resources import files

import numpy as np

# SET LIGHTNESS CONTROL POINTS
# L_points = [10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 98]
L_points = list(range(10, 98+1))
from monobiome.curve import (
    l_maxC_h,
    bezier_y_at_x,
)

# FIXED MONOBIOME PARAMETERS
L_resolution = 5  # step size along lightness dim
L_space = np.arange(0, 100+L_resolution, L_resolution)
parameters_file = files("monobiome.data") / "parameters.toml"
parameters = tomllib.load(parameters_file.open("rb"))

monotone_C_map = {
    "alpine": 0,
    "badlands": 0.011,
    "chaparral": 0.011,
    "savanna": 0.011,
    "grassland": 0.011,
    "tundra": 0.011,
}
L_min: int = parameters.get("L_min", 10)
L_max: int = parameters.get("L_max", 98)
L_step: int = parameters.get("L_step", 5)

h_weights = {
    "red": 3.0,
    "orange": 3.8,  # 3.6
    "yellow": 3.8,  # 4.0
    "green": 3.8,
    "blue": 3.4,  # 3.8
}
h_L_offsets = {
    "red": 0,  # -1,
    "orange": -5.5,  # -3,
    "yellow": -13.5,  # -8
    "green": -11,  # -8
    "blue": 10,  # 14
}
h_C_offsets = {
    "red": 0,  # 0
    "orange": -0.01,  # -0.02
    "yellow": -0.052,  # -0.08
    "green": -0.088,  # -0.105
    "blue": 0.0,  # 0.01
}
L_points: list[int] = list(range(L_min, L_max+1))
L_space = np.arange(0, 100 + L_step, L_step)

monotone_h_map = {
    "alpine": 0,
    "badlands": 29,
    "chaparral": 62.5,
    "savanna": 104,
    "grassland": 148,
    "tundra": 262,
}
accent_h_map = {
    "red": 29,
    "orange": 62.5,
    "yellow": 104,
    "green": 148,
    "blue": 262,
}
monotone_C_map = parameters.get("monotone_C_map", {})
h_weights = parameters.get("h_weights", {})
h_L_offsets = parameters.get("h_L_offsets", {})
h_C_offsets = parameters.get("h_C_offsets", {})
monotone_h_map = parameters.get("monotone_h_map", {})
accent_h_map = parameters.get("accent_h_map", {})
h_map = {**monotone_h_map, **accent_h_map}

"""
Compute chroma maxima at provided lightness levels across hues.

v111_L_space = list(range(15, 95+1, 5))
v111_hC_points = {
    "red": [0.058, 0.074, 0.092, 0.11, 0.128, 0.147, 0.167, 0.183, 0.193, 0.193, 0.182, 0.164, 0.14, 0.112, 0.081, 0.052, 0.024],
    "orange": [0.030, 0.038, 0.046, 0.058, 0.07, 0.084, 0.1, 0.114, 0.125, 0.134, 0.138, 0.136, 0.128, 0.112, 0.092, 0.064, 0.032],
    "yellow": [0.02, 0.024, 0.03, 0.036, 0.044, 0.05, 0.06, 0.068, 0.076, 0.082, 0.088, 0.088, 0.086, 0.082, 0.072, 0.058, 0.04],
    "green": [0.0401, 0.048, 0.056, 0.064, 0.072, 0.08, 0.09, 0.098, 0.104, 0.108, 0.11, 0.108, 0.102, 0.094, 0.084, 0.072, 0.05],
    "blue": [0.06, 0.072, 0.084, 0.096, 0.106, 0.116, 0.124, 0.13, 0.132, 0.128, 0.122, 0.11, 0.096, 0.08, 0.064, 0.044, 0.023],
A map with max chroma values for each hue across lightness space

    {
        "red": [ Cmax@L=10, Cmax@L=11, Cmax@L=12, ... ],
        "orange": [ Cmax@L=10, Cmax@L=11, Cmax@L=12, ... ],
        ...
    }
"""
Lspace_Cmax_Hmap = {
    h_str: [l_maxC_h(_L, _h) for _L in L_space]
    for h_str, _h in h_map.items()
}


"""
Set QBR curves, *unbounded* chroma curves for all hues

1. Raw bezier chroma values for each hue across the lightness space

    Lpoints_Cqbr_Hmap = {
        "red": [ Bezier@L=10, Bezier@L=11, Bezier@L=12, ... ],
        ...
    }

2. Three bezier control points for each hue's chroma curve

    QBR_ctrl_Hmap = {
        "red": np.array([
            [ x1, y1 ],
            [ x2, y2 ],
            [ x3, y3 ]
        ]),
        ...
    }
"""
Lpoints_Cqbr_Hmap = {}
QBR_ctrl_Hmap = {}

for h_str, _h in monotone_h_map.items():
    Lpoints_Cqbr_Hmap[h_str] = np.array(
        [monotone_C_map[h_str]]*len(L_points)
    )

for h_str, _h in accent_h_map.items():
    Lspace_Cmax = Lspace_Cmax_Hmap[h_str]

    # get L value of max chroma; will be a bezier control
    L_Cmax_idx = np.argmax(Lspace_Cmax)
    L_Cmax = L_space[L_Cmax_idx]

    # offset control point by any preset x-shift
    L_Cmax += h_L_offsets[h_str]

    # and get max C at the L offset
    Cmax = l_maxC_h(L_Cmax, _h)

    # set 3 control points; shift by any global linear offset
    C_offset = h_C_offsets.get(h_str, 0)

    p_0 = np.array([0, 0])
    p_Cmax = np.array([L_Cmax, Cmax + C_offset])
    p_100 = np.array([100, 0])

    B_L_points = bezier_y_at_x(
        p_0, p_Cmax, p_100,
        h_weights.get(h_str, 1),
        L_points
    )
    Lpoints_Cqbr_Hmap[h_str] = B_L_points
    QBR_ctrl_Hmap[h_str] = np.vstack([p_0, p_Cmax, p_100])


"""
Bezier chroma values, but bounded to attainable gamut colors (bezier fit
can produce invalid chroma values)

    h_L_points_Cstar = {
        "red": [ bounded-bezier@L=10, bounded-bezier@L=11, ... ],
        ...
    }
"""
Lpoints_Cstar_Hmap = {}

for h_str, L_points_C in Lpoints_Cqbr_Hmap.items():
    _h = h_map[h_str]

    Lpoints_Cstar_Hmap[h_str] = [
        max(0, min(_C, l_maxC_h(_L, _h)))
        for _L, _C in zip(L_points, L_points_C, strict=True)
    ]
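The closing comprehension above clamps each raw Bezier chroma to the largest chroma that still fits the target gamut at that lightness. A toy illustration of the same clamp with made-up numbers, not the package's data:

# hypothetical raw Bezier chroma values and per-lightness gamut maxima
L_points = [20, 30, 40]
C_qbr = [0.05, 0.14, 0.21]       # unbounded Bezier fit
C_max = [0.08, 0.12, 0.19]       # max in-gamut chroma at each L

C_star = [max(0, min(c, cmax)) for c, cmax in zip(C_qbr, C_max, strict=True)]
print(C_star)   # [0.05, 0.12, 0.19] -- the second and third points are clipped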
monobiome/curve.py (new file, 77 lines)
@@ -0,0 +1,77 @@
from functools import cache

import numpy as np
from coloraide import Color


def quad_bezier_rational(
    P0: float,
    P1: float,
    P2: float,
    w: float,
    t: np.array,
) -> np.array:
    """
    Compute the point values of a quadratic rational Bezier curve.

    Uses `P0`, `P1`, and `P2` as the three control points of the curve. `w`
    controls the weight toward the middle control point ("sharpness" of the
    curve), and `t` is the array of parameter values at which the curve is
    sampled.
    """

    t = np.asarray(t)[:, None]
    num = (1-t)**2*P0 + 2*w*(1-t)*t*P1 + t**2*P2
    den = (1-t)**2 + 2*w*(1-t)*t + t**2

    return num / den

def bezier_y_at_x(
    P0: float,
    P1: float,
    P2: float,
    w: float,
    x: float,
    n: int = 400,
) -> np.array:
    """
    For the provided QBR parameters, provide the curve value at the given
    input.
    """

    t = np.linspace(0, 1, n)
    B = quad_bezier_rational(P0, P1, P2, w, t)
    x_vals, y_vals = B[:, 0], B[:, 1]

    return np.interp(x, x_vals, y_vals)

@cache
def l_maxC_h(
    _l: float,
    _h: float,
    space: str = 'srgb',
    eps: float = 1e-6,
    tol: float = 1e-9
) -> float:
    """
    Binary search for max attainable OKLCH chroma at fixed lightness and hue.

    Parameters:
        _l: lightness
        _h: hue

    Returns:
        Max in-gamut chroma at provided lightness and hue
    """

    def chroma_in_gamut(_c: float) -> bool:
        color = Color('oklch', [_l/100, _c, _h])
        return color.convert(space).in_gamut(tolerance=tol)

    lo, hi = 0.0, 0.1
    while chroma_in_gamut(hi):
        hi *= 2
    while hi - lo > eps:
        m = (lo + hi) / 2
        lo, hi = (m, hi) if chroma_in_gamut(m) else (lo, m)

    return lo
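The two halves of this module compose as follows: bezier_y_at_x samples the rational quadratic Bezier B(t) = ((1-t)^2 P0 + 2w t(1-t) P1 + t^2 P2) / ((1-t)^2 + 2w t(1-t) + t^2) densely in t and reads the chroma off at the requested lightness values, while l_maxC_h brackets the gamut boundary by doubling and then bisecting. A small usage sketch; the control points, weight, and hue below are illustrative, not the package's fitted values:

import numpy as np

from monobiome.curve import bezier_y_at_x, l_maxC_h

# peak-chroma control point at L=55, endpoints pinned to zero chroma
p0, p1, p2 = np.array([0, 0]), np.array([55, 0.19]), np.array([100, 0])

# chroma suggested by the curve at a few lightness levels
c_at_L = bezier_y_at_x(p0, p1, p2, w=3.0, x=[20, 55, 90])

# largest chroma that still fits in sRGB at L=55 for a red-ish hue (h=29)
c_gamut = l_maxC_h(55, 29)

print(np.round(c_at_L, 3), round(c_gamut, 3))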
@@ -1,178 +0,0 @@
import json
from pathlib import Path
from functools import cache

import numpy as np

from pprint import pprint
from coloraide import Color

from monobiome.constants import (
    L_points,
    L_resolution,
    L_space,

    monotone_C_map,
    h_weights,
    h_L_offsets,
    h_C_offsets,

    monotone_h_map,
    accent_h_map,
    h_map,
)


@cache
def L_maxC_h(L, h, space='srgb', eps=1e-6, tol=1e-9):
    """
    Binary search for max attainable OKLCH chroma at fixed lightness and hue.

    Parameters:
        L: lightness percentage
    """

    def C_in_gamut(C):
        return Color('oklch', [L/100, C, h]).convert(space).in_gamut(tolerance=tol)

    lo, hi = 0.0, 0.1
    while C_in_gamut(hi):
        hi *= 2
    while hi - lo > eps:
        m = (lo + hi) / 2
        lo, hi = (m, hi) if C_in_gamut(m) else (lo, m)

    Cmax = lo
    # c_oklch = Color('oklch', [L, Cmax, h])
    # c_srgb = c_oklch.convert('srgb')

    return Cmax

def quad_bezier_rational(P0, P1, P2, w, t):
    """
    Compute the point values of a quadratic rational Bezier curve.

    Uses `P0`, `P1`, and `P2` as the three control points of the curve. `w`
    controls the weight toward the middle control point ("sharpness" of the
    curve"), and `t` is the number of sample points used along the curve.
    """

    t = np.asarray(t)[:, None]
    num = (1-t)**2*P0 + 2*w*(1-t)*t*P1 + t**2*P2
    den = (1-t)**2 + 2*w*(1-t)*t + t**2

    return num / den

def bezier_y_at_x(P0, P1, P2, w, x_query, n=400):
    """
    For the provided QBR parameters, provide the curve value at the given
    input.
    """

    t = np.linspace(0, 1, n)
    B = quad_bezier_rational(P0, P1, P2, w, t)
    x_vals, y_vals = B[:, 0], B[:, 1]

    return np.interp(x_query, x_vals, y_vals)

def Lspace_Cmax_Hmap(h_map: dict[str, float], L_space):
    """
    Compute chroma maxima at provided lightness levels across hues.

    Parameters:
        h_map: map from hue names to hue values
        L_space: array-like set of lightness values

    Returns:
        A map with max chroma values for each hue across lightness space

        {
            "red": [ Cmax@L=10, Cmax@L=11, Cmax@L=12, ... ],
            "orange": [ Cmax@L=10, Cmax@L=11, Cmax@L=12, ... ],
            ...
        }
    """
    # compute C max values over each point in L space

    h_Lspace_Cmax = {
        h_str: [max_C_Lh(_L, _h) for _L in L_space]
        for h_str, _h in h_map.items()
    }

    return h_Lspace_Cmax

def ():
    """


    raw bezier chroma values for each hue across the lightness space
    h_L_points_C = {
        "red": [ Bezier@L=10, Bezier@L=11, Bezier@L=12, ... ],
        ...
    }

    three bezier control points for each hue's chroma curve
    h_ctrl_L_C = {
        "red": np.array([
            [ x1, y1 ],
            [ x2, y2 ],
            [ x3, y3 ]
        ]),
        ...
    }
    """

    # compute *unbounded* chroma curves for all hues
    h_L_points_C = {}
    h_ctrl_L_C = {}

    for h_str, _h in monotone_h_map.items():
        h_L_points_C[h_str] = np.array([monotone_C_map[h_str]]*len(L_points))

    for h_str, _h in accent_h_map.items():
        Lspace_Cmax = h_Lspace_Cmax[h_str]

        # get L value of max chroma; will be a bezier control
        L_Cmax_idx = np.argmax(Lspace_Cmax)
        L_Cmax = L_space[L_Cmax_idx]

        # offset control point by any preset x-shift
        L_Cmax += h_L_offsets[h_str]

        # and get max C at the L offset
        Cmax = max_C_Lh(L_Cmax, _h)

        # set 3 control points; shift by any global linear offest
        C_offset = h_C_offsets.get(h_str, 0)

        p_0 = np.array([0, 0])
        p_Cmax = np.array([L_Cmax, Cmax + C_offset])
        p_100 = np.array([100, 0])

        B_L_points = bezier_y_at_x(p_0, p_Cmax, p_100, h_weights.get(h_str, 1), L_points)
        h_L_points_C[h_str] = B_L_points
        h_ctrl_L_C[h_str] = np.vstack([p_0, p_Cmax, p_100])


def ():
    """
    bezier chroma values, but bounded to attainable gamut colors (bezier fit can produce invalid chroma values)
    h_L_points_Cstar = {
        "red": [ bounded-bezier@L=10, bounded-bezier@L=11, ... ],
        ...
    }
    """

    # compute full set of final chroma curves; limits every point to in-gamut max
    h_LC_color_map = {}
    h_L_points_Cstar = {}

    for h_str, L_points_C in h_L_points_C.items():
        _h = h_map[h_str]

        h_L_points_Cstar[h_str] = [
            max(0, min(_C, max_C_Lh(_L, _h)))
            for _L, _C in zip(L_points, L_points_C)
        ]

# if __name__ == "__main__":
monobiome/palette.py (new file, 54 lines)
@@ -0,0 +1,54 @@
import json
from functools import cache
from importlib.metadata import version

from coloraide import Color

from monobiome.constants import (
    h_map,
    L_points,
    Lpoints_Cstar_Hmap,
)


@cache
def compute_hlc_map(notation: str) -> dict[str, dict[int, str]]:
    hlc_map = {}

    for h_str, Lpoints_Cstar in Lpoints_Cstar_Hmap.items():
        _h = h_map[h_str]
        hlc_map[h_str] = {}

        for _l, _c in zip(L_points, Lpoints_Cstar, strict=True):
            oklch = Color('oklch', [_l/100, _c, _h])

            if notation == "hex":
                srgb = oklch.convert('srgb')
                c_str = srgb.to_string(hex=True)
            elif notation == "oklch":
                ol, oc, oh = oklch.convert('oklch').coords()
                c_str = f"oklch({ol*100:.1f}% {oc:.4f} {oh:.1f})"

            hlc_map[h_str][_l] = c_str

    return hlc_map

def generate_palette(
    notation: str,
    file_format: str,
) -> str:
    mb_version = version("monobiome")
    hlc_map = compute_hlc_map(notation)

    if file_format == "json":
        hlc_map["version"] = mb_version
        return json.dumps(hlc_map, indent=4)
    else:
        toml_lines = [f'version = "{mb_version}"', ""]
        for _h, _lc_map in hlc_map.items():
            toml_lines.append(f"[{_h}]")
            for _l, _c in _lc_map.items():
                toml_lines.append(f'l{_l} = "{_c}"')
            toml_lines.append("")

        return "\n".join(toml_lines)
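A sketch of driving generate_palette directly; the hex values in the comment are placeholders that only show the shape of the TOML output, not real palette data:

from monobiome.palette import generate_palette

toml_text = generate_palette("hex", "toml")

# toml_text looks roughly like:
#
#   version = "1.4.0"
#
#   [alpine]
#   l10 = "#0e0e0e"
#   l11 = "#111111"
#   ...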
@@ -1,131 +1,176 @@
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d, CubicSpline, BSpline

from monobiome.constants import (
    L_points,
    L_space,
    h_weights,
    monotone_h_map,
    accent_h_map,
    h_map,
)
from monobiome.curves import (
    h_L_points_Cstar,
    h_Lspace_Cmax,
    L_space,
    L_points,
    accent_h_map,
    monotone_h_map,
    Lspace_Cmax_Hmap,
    Lpoints_Cstar_Hmap,
)

ax_h_map = {}
fig, axes = plt.subplots(
    len(monotone_h_map),
    1,
    sharex=True,
    sharey=True,
    figsize=(4, 8)
)

for i, h_str in enumerate(h_L_points_Cstar):
    _h = h_map[h_str]
    L_points_Cstar = h_L_points_Cstar[h_str]
    L_space_Cmax = h_Lspace_Cmax[h_str]

    if _h not in ax_h_map:
        ax_h_map[_h] = axes[i]
    ax = ax_h_map[_h]
def plot_hue_chroma_bounds() -> None:
    name_h_map = {}
    ax_h_map = {}
    fig, axes = plt.subplots(
        len(monotone_h_map),
        1,
        sharex=True,
        sharey=True,
        figsize=(4, 10)
    )

    # plot Cmax and Cstar
    ax.plot(L_space, L_space_Cmax, c="b", alpha=0.2)
    ax.plot(L_points, L_points_Cstar, alpha=0.7)

    ax.title.set_text(f"Hue [${_h}$]")

    axes[-1].set_xlabel("Lightness (%)")
    axes[-1].set_xticks([L_points[0], L_points[-1]])
    for i, h_str in enumerate(Lpoints_Cstar_Hmap):
        _h = h_map[h_str]

    fig.tight_layout()
    fig.subplots_adjust(top=0.9)

    plt.suptitle("$C^*$ curves for hue groups")
    plt.show()



ax_h_map = {}
fig, axes = plt.subplots(
    len(monotone_h_map),
    1,
    sharex=True,
    sharey=True,
    figsize=(5, 10)
)

for i, h_str in enumerate(h_L_points_Cstar):
    _h = h_map[h_str]
    L_points_Cstar = h_L_points_Cstar[h_str]
    L_space_Cmax = h_Lspace_Cmax[h_str]

    if _h not in ax_h_map:
        ax_h_map[_h] = axes[i]
    ax = ax_h_map[_h]

    # plot Cmax and Cstar
    ax.plot(L_space, L_space_Cmax, c="b", alpha=0.2, label='Cmax')
    ax.plot(L_points, L_points_Cstar, alpha=0.7, label='C*')

    if h_str in v111_hC_points:
        ax.scatter(v111_L_space, v111_hC_points[h_str], s=4, label='Cv111')
        l_space_Cmax = Lspace_Cmax_Hmap[h_str]
        l_points_Cstar = Lpoints_Cstar_Hmap[h_str]

    if h_str in h_ctrl_L_C:
        cpts = h_ctrl_L_C[h_str]
        cpt_x, cpt_y = cpts[:, 0], cpts[:, 1]
        h_w = h_weights.get(h_str, 1)
        if _h not in ax_h_map:
            ax_h_map[_h] = axes[i]
        ax = ax_h_map[_h]

        P0, P1, P2 = cpts[0], cpts[1], cpts[2]
        d0 = 2 * h_w * (P1 - P0)
        d2 = 2 * h_w * (P2 - P1)
        if _h not in name_h_map:
            name_h_map[_h] = []
        name_h_map[_h].append(h_str)

        handle_scale = 0.25
        H0 = P0 + handle_scale * d0
        H2 = P2 - handle_scale * d2

        # ax.plot([P0[0], H0[0]], [P0[1], H0[1]], color='tab:blue', lw=1)
        # ax.plot([P2[0], H2[0]], [P2[1], H2[1]], color='tab:orange', lw=1)
        # plot Cmax and Cstar
        ax.plot(L_space, l_space_Cmax, c="g", alpha=0.3, label="Cmax")

        ax.plot(cpt_x, cpt_y, '--', color='gray', lw=1, label='Bezier polygon')
        ax.scatter(cpt_x, cpt_y, color='red', zorder=5, s=2, label='Control points')
        cstar_label = f"{'accent' if h_str in accent_h_map else 'monotone'} C*"
        ax.plot(L_points, l_points_Cstar, alpha=0.7, label=cstar_label)

    ax.title.set_text(f"Hue [${_h}$]")

    axes[-1].set_ylabel("Chroma (C)")
    axes[-1].set_xlabel("Lightness (%)")
    axes[-1].set_xticks([L_points[0], 50, 65, L_points[-1]])
    ax.title.set_text(f"Hue [${_h}$] - {'|'.join(name_h_map[_h])}")

    axes[-1].set_xlabel("Lightness (%)")
    axes[-1].set_xticks([L_points[0], L_points[-1]])

    fig.tight_layout()
    fig.subplots_adjust(top=0.9)
    fig.tight_layout()
    fig.subplots_adjust(top=0.9)

    handles, labels = axes[-1].get_legend_handles_labels()
    unique = dict(zip(labels, handles))
    fig.legend(unique.values(), unique.keys(), loc='lower center', bbox_to_anchor=(0.5, -0.06), ncol=3)
    handles, labels = axes[-1].get_legend_handles_labels()
    unique = dict(zip(labels, handles))
    fig.legend(unique.values(), unique.keys(), loc='lower center', bbox_to_anchor=(0.5, -0.06), ncol=3)

    plt.suptitle("$C^*$ curves for hue groups + v111 5% lightness")
    plt.show()
    plt.suptitle("$C^*$ curves for hue groups")
    plt.show()


def plot_hue_chroma_star() -> None:
    fig, ax = plt.subplots(1, 1, figsize=(8, 6))

    # uncomment to preview 5 core term colors
    colors = accent_h_map.keys()
    #colors = set(["red", "orange", "yellow", "green", "blue"])

    for h_str in Lpoints_Cstar_Hmap:
        if h_str not in accent_h_map or h_str not in colors:
            continue
        ax.fill_between(
            L_points,
            Lpoints_Cstar_Hmap[h_str],
            alpha=0.2,
            color='grey',
            label=h_str
        )

        x, y = L_points, Lpoints_Cstar_Hmap[h_str]
        n = int(0.45*len(x))
        ax.text(x[n], y[n]-0.01, h_str, rotation=10, va='center', ha='left')

    ax.set_xlabel("Lightness (%)")
    ax.set_xticks([L_points[0], 45, 50, 55, 60, 65, 70, L_points[-1]])
    plt.suptitle("$C^*$ curves (v1.4.0)")
    fig.show()


from numpy import arctan2, degrees
def palette_image(palette, cell_size=40, keys=None):
    if keys is None:
        names = list(palette.keys())
    else:
        names = keys

    row_count = len(names)
    col_counts = [len(palette[n]) for n in names]
    max_cols = max(col_counts)

    fig, ax = plt.subplots(1, 1, figsize=(8, 6))
    h = row_count * cell_size
    w = max_cols * cell_size
    img = np.ones((h, w, 3), float)

    for h_str in h_L_points_Cstar:
        if h_str not in accent_h_map:
            continue
        ax.fill_between(L_points, h_L_points_Cstar[h_str], alpha=0.2, color='grey', label=h_str)
    lightness_keys_per_row = []

        x, y = L_points, h_L_points_Cstar[h_str]
        n = int(0.5*len(x))
        ax.text(x[n], y[n]-0.01, h_str, rotation=10, va='center', ha='left')

    ax.set_xlabel("Lightness (%)")
    ax.set_xticks([L_points[0], 45, 50, 55, 60, 65, 70, L_points[-1]])
    plt.suptitle("$C^*$ curves (v1.3.0)")
    fig.show()
    for r, name in enumerate(names):
        shades = palette[name]
        keys = sorted(shades.keys())
        lightness_keys_per_row.append(keys)
        for c, k in enumerate(keys):
            col = Color(shades[k]).convert("srgb").fit(method="clip")
            rgb = [col["r"], col["g"], col["b"]]
            r0, r1 = r * cell_size, (r + 1) * cell_size
            c0, c1 = c * cell_size, (c + 1) * cell_size
            img[r0:r1, c0:c1, :] = rgb

    return img, names, lightness_keys_per_row, cell_size, max_cols


def show_palette(palette, cell_size=40, keys=None):
    img, names, keys, cell_size, max_cols = palette_image(palette, cell_size, keys=keys)

    fig_w = img.shape[1] / 100
    fig_h = img.shape[0] / 100
    fig, ax = plt.subplots(figsize=(fig_w, fig_h))

    ax.imshow(img, interpolation="none", origin="upper")
    ax.set_xticks([])

    ytick_pos = [(i + 0.5) * cell_size for i in range(len(names))]
    ax.set_yticks(ytick_pos)
    ax.set_yticklabels(names)

    ax.set_ylim(img.shape[0], 0)  # ensures rows render correctly without half-cells

    plt.show()


if __name__ == "__main__":
    from monobiome.constants import OKLCH_hL_dict

    keys = [
        "alpine", "badlands", "chaparral", "savanna", "grassland", "reef",
        "tundra", "heathland", "moorland", "orange", "yellow", "green",
        "cyan", "blue", "violet", "magenta", "red",
    ]
    term_keys = [
        "alpine", "badlands", "chaparral", "savanna", "grassland", "tundra",
        "red", "orange", "yellow", "green", "blue",
    ]

    show_palette(OKLCH_hL_dict, cell_size=25, keys=keys)
    # show_palette(OKLCH_hL_dict, cell_size=1, keys=term_keys)
monobiome/scheme.py (new file, 238 lines)
@@ -0,0 +1,238 @@
from functools import cache
from collections.abc import Callable

from coloraide import Color

from monobiome.util import oklch_distance
from monobiome.palette import compute_hlc_map
from monobiome.constants import (
    accent_h_map,
    monotone_h_map,
)


@cache
def compute_dma_map(
    dT: float,
    metric: Callable | None = None
) -> dict[str, dict]:
    """
    For threshold `dT`, compute the nearest accent shades that exceed that
    threshold for every monotone shade.

    Returns: map of minimum constraint satisfying accent colors for monotone
    spectra

        {
            "alpine": {
                "oklch( ... )": {
                    "red": *nearest oklch >= dT from M base*,
                    ...
                },
                ...
            },
            ...
        }
    """

    if metric is None:
        metric = oklch_distance

    oklch_hlc_map = compute_hlc_map("oklch")
    oklch_color_map = {
        c_name: [Color(c_str) for c_str in c_str_dict.values()]
        for c_name, c_str_dict in oklch_hlc_map.items()
    }

    dT_mL_acol_map = {}
    for m_name in monotone_h_map:
        mL_acol_map = {}
        m_colors = oklch_color_map[m_name]

        for m_color in m_colors:
            acol_min_map = {}

            for a_name in accent_h_map:
                a_colors = oklch_color_map[a_name]
                oklch_dists = filter(
                    lambda d: (d[1] - dT) >= 0,
                    [
                        (ac, metric(m_color, ac))
                        for ac in a_colors
                    ]
                )
                oklch_dists = list(oklch_dists)
                if oklch_dists:
                    min_a_color = min(oklch_dists, key=lambda t: t[1])[0]
                    acol_min_map[a_name] = min_a_color

            # make sure the current monotone level has *all* accents; o/w
            # ignore
            if len(acol_min_map) < len(accent_h_map):
                continue

            mL = m_color.coords()[0]
            mL_acol_map[int(mL*100)] = acol_min_map
        dT_mL_acol_map[m_name] = mL_acol_map

    return dT_mL_acol_map

def generate_scheme_groups(
    mode: str,
    biome: str,
    metric: str,
    distance: float,
    l_base: int,
    l_step: int,
    fg_gap: int,
    grey_gap: int,
    term_fg_gap: int,
    accent_color_map: dict[str, str],
) -> tuple[dict[str, str], ...]:
    """
    Parameters:
        mode: one of ["dark", "light"]
        biome: biome setting
        metric: one of ["wcag", "oklch", "lightness"]
    """

    metric_map = {
        "wcag": lambda mc,ac: ac.contrast(mc, method='wcag21'),
        "oklch": lambda mc,ac: mc.distance(ac, space="oklch"),
        "lightness": lambda mc,ac: abs(mc.coords()[0]-ac.coords()[0])*100,
    }

    metric_func = metric_map[metric]
    dT_mL_acol_map = compute_dma_map(distance, metric=metric_func)
    Lma_map = {
        m_name: mL_acol_dict[l_base]
        for m_name, mL_acol_dict in dT_mL_acol_map.items()
        if l_base in mL_acol_dict
    }

    # the `mL_acol_dict` only includes lightnesses where all accent colors were
    # within threshold. Coverage here will be partial if, at the `mL`, there is
    # some monotone base that doesn't have all accents within threshold. This
    # can happen at the edge, e.g., alpine@L15 has all accents w/in the
    # distance, but the red accent was too far under tundra@L15, so there's no
    # entry. This particular case is fairly rare; it's more likely that *all*
    # monotones are undefined. Either way, both such cases lead to partial
    # scheme coverage.
    if len(Lma_map) < len(monotone_h_map):
        print(f"Warning: partial scheme coverage for {l_base=}@{distance=}")
    if biome not in Lma_map:
        print(f"Biome {biome} unable to meet {metric} constraints")
    accent_colors = Lma_map.get(biome, {})

    meta_pairs = [
        ("mode", mode),
        ("biome", biome),
        ("metric", metric),
        ("distance", distance),
        ("l_base", l_base),
        ("l_step", l_step),
    ]

    # note how selection_bg steps up by `l_step`, selection_fg steps down by
    # `l_step` (from their respective bases)
    term_pairs = [
        ("background", f"f{{{{{biome}.l{l_base}}}}}"),
        ("selection_bg", f"f{{{{{biome}.l{l_base+l_step}}}}}"),
        ("selection_fg", f"f{{{{{biome}.l{l_base+term_fg_gap-l_step}}}}}"),
        ("foreground", f"f{{{{{biome}.l{l_base+term_fg_gap}}}}}"),
        ("cursor", f"f{{{{{biome}.l{l_base+term_fg_gap-l_step}}}}}"),
        ("cursor_text", f"f{{{{{biome}.l{l_base+l_step}}}}}"),
    ]

    monotone_pairs = []
    monotone_pairs += [
        (f"bg{i}", f"f{{{{{biome}.l{l_base+i*l_step}}}}}")
        for i in range(4)
    ]
    monotone_pairs += [
        (f"fg{3-i}", f"f{{{{{biome}.l{fg_gap+l_base+i*l_step}}}}}")
        for i in range(4)
    ]

    accent_pairs = [
        ("black", f"f{{{{{biome}.l{l_base}}}}}"),
        ("grey", f"f{{{{{biome}.l{l_base+grey_gap}}}}}"),
        ("white", f"f{{{{{biome}.l{l_base+term_fg_gap-l_step}}}}}"),
    ]
    for color_name, mb_accent in accent_color_map.items():
        aL = int(100*accent_colors[mb_accent].coords()[0])
        accent_pairs.append(
            (
                color_name,
                f"f{{{{{mb_accent}.l{aL}}}}}"
            )
        )

    return meta_pairs, term_pairs, monotone_pairs, accent_pairs

def generate_scheme(
    mode: str,
    biome: str,
    metric: str,
    distance: float,
    l_base: int,
    l_step: int,
    fg_gap: int,
    grey_gap: int,
    term_fg_gap: int,
    full_color_map: dict[str, str],
    term_color_map: dict[str, str],
    vim_color_map: dict[str, str],
) -> str:
    meta, _, mt, ac = generate_scheme_groups(
        mode, biome, metric, distance,
        l_base, l_step,
        fg_gap, grey_gap, term_fg_gap,
        full_color_map
    )

    _, term, _, term_norm_ac = generate_scheme_groups(
        mode, biome, metric, distance,
        l_base + l_step, l_step,
        fg_gap, grey_gap, term_fg_gap,
        term_color_map
    )
    _, _, _, term_bright_ac = generate_scheme_groups(
        mode, biome, metric, distance,
        l_base + l_step + 10, l_step,
        fg_gap, grey_gap, term_fg_gap,
        term_color_map
    )

    _, _, vim_mt, vim_ac = generate_scheme_groups(
        mode, biome, metric, distance,
        l_base + l_step, l_step,
        fg_gap, grey_gap, term_fg_gap,
        vim_color_map
    )

    def pair_strings(pair_list: list[tuple[str, str]]) -> list[str]:
        return [
            f"{lhs:<12} = \"{rhs}\""
            for lhs, rhs in pair_list
        ]

    scheme_pairs = []
    scheme_pairs += pair_strings(meta)
    scheme_pairs += pair_strings(mt)
    scheme_pairs += pair_strings(ac)

    scheme_pairs += ["", "[term]"]
    scheme_pairs += pair_strings(term)

    scheme_pairs += ["", "[term.normal]"]
    scheme_pairs += pair_strings(term_norm_ac)

    scheme_pairs += ["", "[term.bright]"]
    scheme_pairs += pair_strings(term_bright_ac)

    scheme_pairs += ["", "[vim]"]
    scheme_pairs += pair_strings(vim_mt)
    scheme_pairs += pair_strings(vim_ac)

    return "\n".join(scheme_pairs)
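The scheme entries above are emitted as f{{...}} template references rather than resolved colors, and the quadruple braces in the f-strings escape down to doubled braces in the output. A quick check of that rendering; the biome and lightness values below are the CLI defaults and are only illustrative:

biome, l_base, l_step = "alpine", 20, 5

background = f"f{{{{{biome}.l{l_base}}}}}"
selection_bg = f"f{{{{{biome}.l{l_base + l_step}}}}}"

print(background)     # -> f{{alpine.l20}}
print(selection_bg)   # -> f{{alpine.l25}}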
@@ -1,140 +0,0 @@
def compute_dma_map(dT, metric=None):
    """
    For threshold `dT`, compute the nearest accent shades
    that exceed that threshold for every monotone shade.

    Returns:

        Map like
        { "alpine": {
            "oklch( ... )": {
                "red": *nearest oklch >= dT from M base*
    """

    if metric is None:
        metric = lambda mc,ac: mc.distance(ac, space="oklch")

    oklch_color_map = {
        c_name: [Color(c_str) for c_str in c_str_dict.values()]
        for c_name, c_str_dict in oklch_hL_dict.items()
    }

    dT_mL_acol_map = {}
    for m_name in monotone_h_map:
        mL_acol_map = {}
        m_colors = oklch_color_map[m_name]

        for m_color in m_colors:
            acol_min_map = {}

            for a_name in accent_h_map:
                a_colors = oklch_color_map[a_name]
                oklch_dists = filter(
                    lambda d: (d[1] - dT) > 0,
                    [
                        (ac, metric(m_color, ac))
                        for ac in a_colors
                    ]
                )
                oklch_dists = list(oklch_dists)
                if oklch_dists:
                    min_a_color = min(oklch_dists, key=lambda t: t[1])[0]
                    acol_min_map[a_name] = min_a_color

            # make sure the current monotone level has *all* accents; o/w ignore
            if len(acol_min_map) < len(accent_h_map):
                continue

            mL = m_color.coords()[0]
            mL_acol_map[int(mL*100)] = acol_min_map
        dT_mL_acol_map[m_name] = mL_acol_map
    return dT_mL_acol_map






mode = "dark"  # ["dark", "light"]
biome = "alpine"  # [ ... ]
metric = "wcag"  # ["wcag", "oklch"]
metric_map = {
    "wcag": lambda mc,ac: ac.contrast(mc, method='wcag21'),
    "oklch": lambda mc,ac: mc.distance(ac, space="oklch"),
}
metric_func = metric_map[metric]

term_color_map = {
    "red": "red",
    "organge": "orange",
    "yellow": "yellow",
    "green": "green",
    "cyan": "green",
    "blue": "blue",
    "violet": "blue",
    "magenta": "red",
}

L = 20
d = 4.5
I = 5
fg_gap = 50
grey_gap = 30

dT_mL_acol_map = compute_dma_map(d, metric=metric_func)
Lma_map = {
    m_name: mL_acol_dict[L]
    for m_name, mL_acol_dict in dT_mL_acol_map.items()
    if L in mL_acol_dict
}

# the `mL_acol_dict` only includes lightnesses where all accent
# colors were within threshold. Coverage here will be partial if,
# at the `mL`, there is some monotone base that doesn't have all
# accents within threshold. This can happen at the edge, e.g., alpine@L15
# has all accents w/in the distance, but the red accent was too far under
# tundra@L15, so there's no entry. This particular case is fairly rare; it's
# more likely that *all* monotones are undefined. Either way, both such
# cases lead to partial scheme coverage.
if len(Lma_map) < len(monotone_h_map):
    print(f"Warning: partial scheme coverage for {mL=}@{dT=}")
if biome not in Lma_map:
    print(f"Biome {biome} unable to meet {metric} constraints")
accent_colors = Lma_map.get(biome, {})

scheme_pairs = []
for i in range(4):
    scheme_pairs.append(
        (
            f"bg{i}",
            f"f{{{{{biome}.l{L+i*I}}}}}"
        )
    )
for i in range(4):
    scheme_pairs.append(
        (
            f"fg{3-i}",
            f"f{{{{{biome}.l{fg_gap+L+i*I}}}}}"
        )
    )
for term_color, mb_accent in term_color_map.items():
    aL = int(100*accent_colors[mb_accent].coords()[0])
    scheme_pairs.append(
        (
            f"{term_color}",
            f"f{{{{{mb_accent}.l{aL}}}}}"
        )
    )

term_fg_gap = 60
scheme_pairs.extend([
    ("background", f"f{{{{{biome}.l{L}}}}}"),
    ("selection_bg", f"f{{{{{biome}.l{L+I}}}}}"),
    ("selection_fg", f"f{{{{{biome}.l{L+term_fg_gap}}}}}"),
    ("foreground", f"f{{{{{biome}.l{L+term_fg_gap+I}}}}}"),
])

scheme_toml = [
    f"{lhs:<12} = {rhs:<16}"
    for lhs, rhs in scheme_pairs
]
monobiome/util.py (new file, 10 lines)
@@ -0,0 +1,10 @@
from types import GenericAlias
from argparse import ArgumentParser, _SubParsersAction

from coloraide import Color

_SubParsersAction.__class_getitem__ = classmethod(GenericAlias)
_SubparserType = _SubParsersAction[ArgumentParser]

def oklch_distance(mc: Color, ac: Color) -> float:
    return mc.distance(ac, space="oklch")
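The GenericAlias patch above makes the private argparse class subscriptable at runtime, so _SubParsersAction[ArgumentParser] can be used as the _SubparserType annotation; it would otherwise not support subscription. A sketch of the same trick applied to an ordinary class, purely for illustration:

from types import GenericAlias


class Registry:
    """Plain container with no typing support of its own."""


# after this patch, Registry[int] is valid at runtime and usable in aliases
Registry.__class_getitem__ = classmethod(GenericAlias)

IntRegistry = Registry[int]
print(IntRegistry)   # -> __main__.Registry[int] (a types.GenericAlias)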
@@ -1,39 +0,0 @@

# put together objects for output formats
toml_lines = []
oklch_hL_dict = {}

for h_str, L_points_Cstar in h_L_points_Cstar.items():
    _h = h_map[h_str]
    toml_lines.append(f"[{h_str}]")
    oklch_hL_dict[h_str] = {}

    for _L, _C in zip(L_points, L_points_Cstar):
        oklch = Color('oklch', [_L/100, _C, _h])
        srgb = oklch.convert('srgb')

        hex_str = srgb.to_string(hex=True)

        l, c, h = oklch.convert('oklch').coords()
        # oklch_str = oklch.to_string(percent=False)
        oklch_str = f"oklch({l*100:.1f}% {c:.4f} {h:.1f})"

        toml_lines.append(f'l{_L} = "{hex_str}"')
        oklch_hL_dict[h_str][_L] = oklch_str

    toml_lines.append("")


# write files -- QBR = "quadratic bezier rational"
PALETTE_DIR = "palettes"

toml_content = '\n'.join(toml_lines)
with Path(PALETTE_DIR, 'monobiome-vQBRsn-130.toml').open('w') as f:
    f.write(toml_content)
print("[TOML] written")

with Path(PALETTE_DIR, 'monobiome-vQBRsn-130-oklch.json').open('w') as f:
    json.dump(oklch_hL_dict, f)
print("[JSON] written")
@@ -1,7 +1,11 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "monobiome"
version = "1.2.0"
description = "Add your description here"
version = "1.4.0"
description = "Monobiome color palette"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
@@ -21,3 +25,39 @@ dependencies = [
dev = [
    "ipykernel>=7.0.1",
]

[project.scripts]
monobiome = "monobiome.__main__:main"

[project.urls]
Homepage = "https://doc.olog.io/monobiome"
Documentation = "https://doc.olog.io/monobiome"
Repository = "https://git.olog.io/olog/monobiome"
Issues = "https://git.olog.io/olog/monobiome/issues"

[tool.setuptools.packages.find]
include = ["monobiome*"]

[tool.setuptools.package-data]
"monobiome" = ["data/*.toml"]

[tool.ruff]
line-length = 79

[tool.ruff.lint]
select = ["ANN", "E", "F", "UP", "B", "SIM", "I", "C4", "PERF"]

[tool.ruff.lint.isort]
length-sort = true
order-by-type = false
force-sort-within-sections = false

[tool.ruff.lint.per-file-ignores]
"tests/**" = ["S101"]
"**/__init__.py" = ["F401"]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
docstring-code-format = true
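The [project.scripts] table wires the monobiome command to monobiome.__main__:main. Once the package is installed, that mapping is visible through the standard entry-point machinery; a sketch that assumes an installed environment:

from importlib.metadata import entry_points

(script,) = entry_points(group="console_scripts", name="monobiome")
print(script.value)   # -> "monobiome.__main__:main"

main = script.load()  # the same callable the generated console script invokes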