feat: implement sqlalchemy backend

Co-authored-by: Loan Robert <loan@yaal.coop>
Éloi Rivard 2023-11-15 16:37:06 +01:00
parent d97459dbac
commit 27639081f0
25 changed files with 1321 additions and 23 deletions


@@ -7,6 +7,7 @@ Added
 *****
 - Refresh token grant supports other client authentication methods. :pr:`157`
+- Implement a SQLAlchemy backend. :pr:`158`
 Changed
 *******


@@ -34,9 +34,10 @@ The canaille server has some default users:
 Backends
 ~~~~~~~~
-Canaille comes with two backends:
+Canaille comes with several backends:
 - a lightweight test purpose `memory` backend
+- a `sql` backend, based on sqlalchemy
 - a production-ready `LDAP` backend
 Docker environment
@@ -58,9 +59,12 @@ To launch containers, use:
 .. code-block:: console
     cd demo
-    # To run the demo with the memory backend:
+    # To run the demo with the sql backend:
     docker compose up
+    # To run the demo with the memory backend:
+    docker compose --file docker-compose-memory.yml up
     # To run the demo with the LDAP backend:
     docker compose --file docker-compose-ldap.yml up
@@ -69,19 +73,20 @@ Local environment
 .. code-block:: console
-    # To run the demo with the memory backend:
+    # To run the demo with the sql backend:
     ./demo/run.sh
-If you want to run the demo locally with the LDAP backend, you need to have
-OpenLDAP installed on your system.
-.. code-block:: console
+    # To run the demo with the memory backend:
+    ./demo/run.sh --backend memory
     # To run the demo with the LDAP backend:
     ./demo/run.sh --backend ldap
-.. warning ::
+.. note ::
+    If you want to run the demo locally with the LDAP backend, you need to have
+    OpenLDAP installed on your system.
+.. warning ::
     On Debian or Ubuntu systems, the OpenLDAP `slapd` binary usage might be restricted by apparmor,
     and thus makes the tests and the demo fail. This can be mitigated by removing apparmor restrictions
     on `slapd`.
@@ -105,7 +110,7 @@ users and groups with the ``populate`` command:
     # If running in local environment
     env CONFIG=conf/canaille-ldap.toml poetry run canaille populate --nb 100 users
-Note that this will not work with the memory backend.
+Adapt to use either the `ldap` or the `sql` configuration file. Note that this will not work with the memory backend.
 Unit tests
 ----------
@@ -113,7 +118,7 @@ Unit tests
 To run the tests, you just can run `poetry run pytest` and/or `tox` to test all the supported python environments.
 Everything must be green before patches get merged.
-To test a specific backend you can pass ``--backend memory`` or ``--backend ldap`` to pytest and tox.
+To test a specific backend you can pass ``--backend memory``, ``--backend sql`` or ``--backend ldap`` to pytest and tox.
 The test coverage is 100%, patches won't be accepted if not entirely covered. You can check the
 test coverage with ``poetry run pytest --cov --cov-report=html`` or ``tox -e coverage -- --cov-report=html``.


@@ -0,0 +1,49 @@
from canaille.backends import BaseBackend
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import Session
Base = declarative_base()
def db_session(db_uri=None, init=False):
engine = create_engine(db_uri, echo=False, future=True)
if init:
Base.metadata.create_all(engine)
session = Session(engine)
return session
class Backend(BaseBackend):
db_session = None
@classmethod
def install(cls, config, debug=False): # pragma: no cover
engine = create_engine(
config["BACKENDS"]["SQL"]["SQL_DATABASE_URI"],
echo=False,
future=True,
)
Base.metadata.create_all(engine)
def setup(self, init=False):
if not self.db_session:
self.db_session = db_session(
self.config["BACKENDS"]["SQL"]["SQL_DATABASE_URI"],
init=init,
)
def teardown(self):
pass
@classmethod
def validate(cls, config):
pass
@classmethod
def login_placeholder(cls):
return ""
def has_account_lockability(self):
return True
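
For reference, the session factory above follows the standard SQLAlchemy engine/session pattern. Below is a minimal standalone sketch of the same pattern, assuming plain SQLAlchemy and a throwaway in-memory SQLite database (the ``make_session`` name is hypothetical):

.. code-block:: python

    # Minimal sketch of the db_session() pattern above, run against an
    # in-memory SQLite database (illustrative, hypothetical helper name).
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    def make_session(db_uri="sqlite:///:memory:", init=False):
        engine = create_engine(db_uri, echo=False, future=True)
        if init:
            # what Backend.install() does once, at installation time
            Base.metadata.create_all(engine)
        return Session(engine)

    session = make_session(init=True)
    session.close()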


@@ -0,0 +1,378 @@
import datetime
import uuid
from typing import List
import canaille.core.models
import canaille.oidc.models
from canaille.app import models
from canaille.backends.models import Model
from flask import current_app
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import mapped_column
from sqlalchemy.orm import reconstructor
from sqlalchemy.orm import relationship
from sqlalchemy_json import MutableJson
from .backend import Backend
from .backend import Base
from .utils import TZDateTime
class SqlAlchemyModel(Model):
def __html__(self):
return self.id
def __repr__(self):
return (
f"<{self.__class__.__name__} {self.identifier_attribute}={self.identifier}>"
)
@classmethod
def query(cls, **kwargs):
filter = [
cls.attribute_filter(attribute_name, expected_value)
for attribute_name, expected_value in kwargs.items()
]
return (
Backend.get()
.db_session.execute(select(cls).filter(*filter))
.scalars()
.all()
)
@classmethod
def fuzzy(cls, query, attributes=None, **kwargs):
attributes = attributes or cls.attributes
filter = or_(
getattr(cls, attribute_name).ilike(f"%{query}%")
for attribute_name in attributes
if "str" in str(cls.__annotations__[attribute_name])
)
return (
Backend.get().db_session.execute(select(cls).filter(filter)).scalars().all()
)
@classmethod
def attribute_filter(cls, name, value):
if isinstance(value, list):
return or_(cls.attribute_filter(name, v) for v in value)
multiple = "List" in str(cls.__annotations__[name])
if multiple:
return getattr(cls, name).contains(value)
return getattr(cls, name) == value
@classmethod
def get(cls, identifier=None, **kwargs):
if identifier:
kwargs[cls.identifier_attribute] = identifier
filter = [
cls.attribute_filter(attribute_name, expected_value)
for attribute_name, expected_value in kwargs.items()
]
return (
Backend.get()
.db_session.execute(select(cls).filter(*filter))
.scalar_one_or_none()
)
@property
def identifier(self):
return getattr(self, self.identifier_attribute)
def save(self):
Backend.get().db_session.add(self)
Backend.get().db_session.commit()
def delete(self):
Backend.get().db_session.delete(self)
Backend.get().db_session.commit()
def reload(self):
Backend.get().db_session.refresh(self)
membership_association_table = Table(
"membership_association_table",
Base.metadata,
Column("user_id", ForeignKey("user.id"), primary_key=True),
Column("group_id", ForeignKey("group.id"), primary_key=True),
)
class User(canaille.core.models.User, Base, SqlAlchemyModel):
__tablename__ = "user"
identifier_attribute = "user_name"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
user_name: Mapped[str] = mapped_column(String, unique=True, nullable=False)
password: Mapped[str] = mapped_column(String, nullable=True)
preferred_language: Mapped[str] = mapped_column(String, nullable=True)
family_name: Mapped[str] = mapped_column(String, nullable=True)
given_name: Mapped[str] = mapped_column(String, nullable=True)
formatted_name: Mapped[str] = mapped_column(String, nullable=True)
display_name: Mapped[str] = mapped_column(String, nullable=True)
emails: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
phone_numbers: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
formatted_address: Mapped[str] = mapped_column(String, nullable=True)
street: Mapped[str] = mapped_column(String, nullable=True)
postal_code: Mapped[str] = mapped_column(String, nullable=True)
locality: Mapped[str] = mapped_column(String, nullable=True)
region: Mapped[str] = mapped_column(String, nullable=True)
photo: Mapped[bytes] = mapped_column(LargeBinary, nullable=True)
profile_url: Mapped[str] = mapped_column(String, nullable=True)
employee_number: Mapped[str] = mapped_column(String, nullable=True)
department: Mapped[str] = mapped_column(String, nullable=True)
title: Mapped[str] = mapped_column(String, nullable=True)
organization: Mapped[str] = mapped_column(String, nullable=True)
last_modified: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
groups: Mapped[List["Group"]] = relationship(
secondary=membership_association_table, back_populates="members"
)
lock_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.load_permissions()
def reload(self):
super().reload()
self.load_permissions()
@reconstructor
def load_permissions(self):
self.permissions = set()
self.read = set()
self.write = set()
for access_group_name, details in current_app.config["ACL"].items():
if self.match_filter(details.get("FILTER")):
self.permissions |= set(details.get("PERMISSIONS", []))
self.read |= set(details.get("READ", []))
self.write |= set(details.get("WRITE", []))
def normalize_filter_value(self, attribute, value):
# not super generic, but we can improve this when we have
# type checking and/or pydantic for the models
if attribute == "groups":
if models.Group.get(id=value):
return models.Group.get(id=value)
elif models.Group.get(display_name=value):
return models.Group.get(display_name=value)
return value
def match_filter(self, filter):
if filter is None:
return True
if isinstance(filter, dict):
return all(
self.normalize_filter_value(attribute, value)
in getattr(self, attribute, [])
if "List" in str(self.__annotations__[attribute])
else self.normalize_filter_value(attribute, value)
== getattr(self, attribute, None)
for attribute, value in filter.items()
)
return any(self.match_filter(subfilter) for subfilter in filter)
@classmethod
def get_from_login(cls, login=None, **kwargs):
return User.get(user_name=login)
def has_password(self):
return bool(self.password)
def check_password(self, password):
if password != self.password:
return (False, None)
if self.locked:
return (False, "Your account has been locked.")
return (True, None)
def set_password(self, password):
self.password = password
self.save()
def save(self):
self.last_modified = datetime.datetime.now(datetime.timezone.utc).replace(
microsecond=0
)
super().save()
class Group(canaille.core.models.Group, Base, SqlAlchemyModel):
__tablename__ = "group"
identifier_attribute = "display_name"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
display_name: Mapped[str] = mapped_column(String)
description: Mapped[str] = mapped_column(String, nullable=True)
members: Mapped[List["User"]] = relationship(
secondary=membership_association_table, back_populates="groups"
)
client_audience_association_table = Table(
"client_audience_association_table",
Base.metadata,
Column("audience_id", ForeignKey("client.id"), primary_key=True, nullable=True),
Column("client_id", ForeignKey("client.id"), primary_key=True, nullable=True),
)
class Client(canaille.oidc.models.Client, Base, SqlAlchemyModel):
__tablename__ = "client"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
identifier_attribute = "client_id"
description: Mapped[str] = mapped_column(String, nullable=True)
preconsent: Mapped[bool] = mapped_column(Boolean, nullable=True)
post_logout_redirect_uris: Mapped[List[str]] = mapped_column(
MutableJson, nullable=True
)
audience: Mapped[List["Client"]] = relationship(
"Client",
secondary=client_audience_association_table,
primaryjoin=id == client_audience_association_table.c.client_id,
secondaryjoin=id == client_audience_association_table.c.audience_id,
)
client_id: Mapped[str] = mapped_column(String, nullable=True)
client_secret: Mapped[str] = mapped_column(String, nullable=True)
client_id_issued_at: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
client_secret_expires_at: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
client_name: Mapped[str] = mapped_column(String, nullable=True)
contacts: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
client_uri: Mapped[str] = mapped_column(String, nullable=True)
redirect_uris: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
logo_uri: Mapped[str] = mapped_column(String, nullable=True)
grant_types: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
response_types: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
scope: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
tos_uri: Mapped[str] = mapped_column(String, nullable=True)
policy_uri: Mapped[str] = mapped_column(String, nullable=True)
jwks_uri: Mapped[str] = mapped_column(String, nullable=True)
jwk: Mapped[str] = mapped_column(String, nullable=True)
token_endpoint_auth_method: Mapped[str] = mapped_column(String, nullable=True)
software_id: Mapped[str] = mapped_column(String, nullable=True)
software_version: Mapped[str] = mapped_column(String, nullable=True)
class AuthorizationCode(canaille.oidc.models.AuthorizationCode, Base, SqlAlchemyModel):
__tablename__ = "authorization_code"
identifier_attribute = "authorization_code_id"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
authorization_code_id: Mapped[str] = mapped_column(String, nullable=True)
code: Mapped[str] = mapped_column(String, nullable=True)
client_id: Mapped[str] = mapped_column(ForeignKey("client.id"))
client: Mapped["Client"] = relationship()
subject_id: Mapped[str] = mapped_column(ForeignKey("user.id"))
subject: Mapped["User"] = relationship()
redirect_uri: Mapped[str] = mapped_column(String, nullable=True)
response_type: Mapped[str] = mapped_column(String, nullable=True)
scope: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
nonce: Mapped[str] = mapped_column(String, nullable=True)
issue_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
lifetime: Mapped[int] = mapped_column(Integer, nullable=True)
challenge: Mapped[str] = mapped_column(String, nullable=True)
challenge_method: Mapped[str] = mapped_column(String, nullable=True)
revokation_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
token_audience_association_table = Table(
"token_audience_association_table",
Base.metadata,
Column("token_id", ForeignKey("token.id"), primary_key=True, nullable=True),
Column("client_id", ForeignKey("client.id"), primary_key=True, nullable=True),
)
class Token(canaille.oidc.models.Token, Base, SqlAlchemyModel):
__tablename__ = "token"
identifier_attribute = "token_id"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
token_id: Mapped[str] = mapped_column(String, nullable=True)
access_token: Mapped[str] = mapped_column(String, nullable=True)
client_id: Mapped[str] = mapped_column(ForeignKey("client.id"))
client: Mapped["Client"] = relationship()
subject_id: Mapped[str] = mapped_column(ForeignKey("user.id"))
subject: Mapped["User"] = relationship()
type: Mapped[str] = mapped_column(String, nullable=True)
refresh_token: Mapped[str] = mapped_column(String, nullable=True)
scope: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
issue_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
lifetime: Mapped[int] = mapped_column(Integer, nullable=True)
revokation_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
audience: Mapped[List["Client"]] = relationship(
"Client",
secondary=token_audience_association_table,
primaryjoin=id == token_audience_association_table.c.token_id,
secondaryjoin=Client.id == token_audience_association_table.c.client_id,
)
class Consent(canaille.oidc.models.Consent, Base, SqlAlchemyModel):
__tablename__ = "consent"
identifier_attribute = "consent_id"
id: Mapped[str] = mapped_column(
String, primary_key=True, default=lambda: str(uuid.uuid4())
)
consent_id: Mapped[str] = mapped_column(String, nullable=True)
subject_id: Mapped[str] = mapped_column(ForeignKey("user.id"))
subject: Mapped["User"] = relationship()
client_id: Mapped[str] = mapped_column(ForeignKey("client.id"))
client: Mapped["Client"] = relationship()
scope: Mapped[List[str]] = mapped_column(MutableJson, nullable=True)
issue_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
revokation_date: Mapped[datetime.datetime] = mapped_column(
TZDateTime(timezone=True), nullable=True
)
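
The query helpers above (``query()``, ``get()``, ``fuzzy()``) all reduce to composing SQLAlchemy filters over a ``select()``. Here is a self-contained sketch of the ``fuzzy()`` approach, using a hypothetical ``Person`` model rather than a Canaille one:

.. code-block:: python

    # Sketch of the fuzzy() filter composition: OR a case-insensitive
    # LIKE over every string column, then run it through select().
    from sqlalchemy import String, create_engine, or_, select
    from sqlalchemy.orm import Mapped, Session, declarative_base, mapped_column

    Base = declarative_base()

    class Person(Base):
        __tablename__ = "person"
        id: Mapped[int] = mapped_column(primary_key=True)
        user_name: Mapped[str] = mapped_column(String)
        display_name: Mapped[str] = mapped_column(String, nullable=True)

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([
            Person(user_name="jdoe", display_name="John Doe"),
            Person(user_name="asmith", display_name="Alice Smith"),
        ])
        session.commit()

        needle = "doe"
        # one ilike() clause per searchable string column, OR-ed together
        fuzzy_filter = or_(
            Person.user_name.ilike(f"%{needle}%"),
            Person.display_name.ilike(f"%{needle}%"),
        )
        matches = session.execute(select(Person).filter(fuzzy_filter)).scalars().all()
        print([person.user_name for person in matches])  # ['jdoe']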


@@ -0,0 +1,19 @@
import datetime
from sqlalchemy import DateTime
from sqlalchemy import TypeDecorator
class TZDateTime(TypeDecorator):
impl = DateTime
cache_ok = True
def process_bind_param(self, value, dialect):
if value is not None:
value = value.astimezone(datetime.timezone.utc).replace(tzinfo=None)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = value.replace(tzinfo=datetime.timezone.utc)
return value
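
For illustration, the round trip performed by ``TZDateTime`` looks like this: aware datetimes are normalized to UTC and stored naive, then re-tagged as UTC when read back. A standalone sketch:

.. code-block:: python

    # Standalone illustration of the TZDateTime conversions above.
    import datetime

    paris = datetime.timezone(datetime.timedelta(hours=1))
    value = datetime.datetime(2023, 11, 15, 16, 37, tzinfo=paris)

    # process_bind_param(): convert to UTC and drop tzinfo before storage
    stored = value.astimezone(datetime.timezone.utc).replace(tzinfo=None)
    assert stored == datetime.datetime(2023, 11, 15, 15, 37)

    # process_result_value(): re-attach UTC when loading
    loaded = stored.replace(tzinfo=datetime.timezone.utc)
    assert loaded == value  # same instant, now expressed in UTC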


@@ -77,15 +77,20 @@ SECRET_KEY = "change me before you go in production"
 # written in the standard error output.
 # PATH = ""
-[BACKENDS.LDAP]
-URI = "ldap://ldap"
-ROOT_DN = "dc=mydomain,dc=tld"
-BIND_DN = "cn=admin,dc=mydomain,dc=tld"
-BIND_PW = "admin"
+# [BACKENDS.SQL]
+# The SQL database connection string
+# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
+# SQL_DATABASE_URI = "postgresql://user:password@localhost/database"
+# [BACKENDS.LDAP]
+# URI = "ldap://ldap"
+# ROOT_DN = "dc=mydomain,dc=tld"
+# BIND_DN = "cn=admin,dc=mydomain,dc=tld"
+# BIND_PW = "admin"
 # TIMEOUT =
 # Where to search for users?
-USER_BASE = "ou=users,dc=mydomain,dc=tld"
+# USER_BASE = "ou=users,dc=mydomain,dc=tld"
 # The object class to use for creating new users
 # USER_CLASS = "inetOrgPerson"
@@ -99,7 +104,7 @@ USER_BASE = "ou=users,dc=mydomain,dc=tld"
 # USER_FILTER = "(|(uid={{ login }})(mail={{ login }}))"
 # Where to search for groups?
-GROUP_BASE = "ou=groups,dc=mydomain,dc=tld"
+# GROUP_BASE = "ou=groups,dc=mydomain,dc=tld"
 # The object class to use for creating new groups
 # GROUP_CLASS = "groupOfNames"


@@ -14,4 +14,4 @@ RUN pip install poetry
 WORKDIR /opt/canaille
 RUN poetry install --with demo --without dev --extras all
-ENTRYPOINT ["poetry", "run", "flask", "run", "--host=0.0.0.0", "--extra-files", "/opt/canaille/conf/canaille-memory.toml", "/opt/canaille/conf/canaille-ldap.toml"]
+ENTRYPOINT ["poetry", "run", "flask", "run", "--host=0.0.0.0", "--extra-files", "/opt/canaille/conf/canaille-memory.toml", "--extra-files", "/opt/canaille/conf/canaille-ldap.toml", "--extra-files", "/opt/canaille/conf/canaille-sql.toml"]

demo/Procfile-sql Normal file

@@ -0,0 +1,3 @@
canaille: env FLASK_DEBUG=1 AUTHLIB_INSECURE_TRANSPORT=1 CONFIG=conf/canaille-sql.toml FLASK_APP=demoapp flask run --extra-files conf/canaille-sql.toml
client1: env FLASK_DEBUG=1 CONFIG=../conf/client1.cfg FLASK_APP=client flask run --port=5001
client2: env FLASK_DEBUG=1 CONFIG=../conf/client2.cfg FLASK_APP=client flask run --port=5002


@@ -78,6 +78,11 @@ LEVEL = "DEBUG"
 # written in the standard error output.
 # PATH = ""
+# [BACKENDS.SQL]
+# The SQL database connection string
+# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
+# SQL_DATABASE_URI = "postgresql://user:password@localhost/database"
 [BACKENDS.LDAP]
 URI = "ldap://ldap:389"
 ROOT_DN = "dc=mydomain,dc=tld"


@@ -78,6 +78,11 @@ LEVEL = "DEBUG"
 # written in the standard error output.
 # PATH = ""
+# [BACKENDS.SQL]
+# The SQL database connection string
+# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
+# SQL_DATABASE_URI = "postgresql://user:password@localhost/database"
 # [BACKENDS.LDAP]
 # URI = "ldap://ldap:389"
 # ROOT_DN = "dc=mydomain,dc=tld"


@@ -0,0 +1,265 @@
# All the Flask configuration values can be used:
# https://flask.palletsprojects.com/en/2.3.x/config/#builtin-configuration-values
# The flask secret key for cookies. You MUST change this.
SECRET_KEY = "change me before you go in production"
# Your organization name.
NAME = "Canaille"
# The interface on which canaille will be served
# SERVER_NAME = "auth.mydomain.tld"
# PREFERRED_URL_SCHEME = "https"
# You can display a logo to be recognized on login screens
LOGO = "/static/img/canaille-head.png"
# Your favicon. If unset the LOGO will be used.
FAVICON = "/static/img/canaille-c.png"
# The name of a theme in the 'theme' directory, or an absolute path
# to a theme. Defaults to 'default'. Theming is done with
# https://github.com/tktech/flask-themer
# THEME = "default"
# If unset, language is detected
# LANGUAGE = "en"
# The timezone in which datetimes will be displayed to the users.
# If unset, the server timezone will be used.
# TIMEZONE = UTC
# If you have a sentry instance, you can set its dsn here:
# SENTRY_DSN = "https://examplePublicKey@o0.ingest.sentry.io/0"
# Enables javascript to smooth the user experience
# JAVASCRIPT = true
# Accelerates webpages with async requests
# HTMX = true
# If EMAIL_CONFIRMATION is set to true, users will need to click on a
# confirmation link sent by email when they want to add a new email.
# By default, this is true if SMTP is configured, else this is false.
# If explicitly set to true and SMTP is disabled, the email field
# will be read-only.
# EMAIL_CONFIRMATION =
# If ENABLE_REGISTRATION is true, then users can freely create an account
# at this instance. If email verification is available, users must confirm
# their email before the account is created.
ENABLE_REGISTRATION = true
# If HIDE_INVALID_LOGINS is set to true (the default), when a user
# tries to sign in with an invalid login, a message is shown indicating
# that the password is wrong, but does not give a clue whether the login
# exists or not.
# If HIDE_INVALID_LOGINS is set to false, when a user tries to sign in with
# an invalid login, a message is shown indicating that the login does not
# exist.
# HIDE_INVALID_LOGINS = true
# If ENABLE_PASSWORD_RECOVERY is false, then users cannot ask for a password
# recovery link by email. This option is true by default.
# ENABLE_PASSWORD_RECOVERY = true
# The validity duration of registration invitations, in seconds.
# Defaults to 2 days
# INVITATION_EXPIRATION = 172800
[LOGGING]
# LEVEL can be one value among:
# DEBUG, INFO, WARNING, ERROR, CRITICAL
# Defaults to WARNING
# LEVEL = "WARNING"
LEVEL = "DEBUG"
# The path of the log file. If not set (the default) logs are
# written in the standard error output.
# PATH = ""
[BACKENDS.SQL]
# The SQL database connection string
# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
SQL_DATABASE_URI = "sqlite:///demo.sqlite"
# [BACKENDS.LDAP]
# URI = "ldap://ldap:389"
# ROOT_DN = "dc=mydomain,dc=tld"
# BIND_DN = "cn=admin,dc=mydomain,dc=tld"
# BIND_PW = "admin"
# TIMEOUT = 10
# Where to search for users?
# USER_BASE = "ou=users,dc=mydomain,dc=tld"
# The object class to use for creating new users
# USER_CLASS = "inetOrgPerson"
# The attribute to identify an object in the User dn.
# USER_RDN = "uid"
# Filter to match users on sign in. Jinja syntax is supported
# and a `login` variable is available containing the value
# passed in the login field.
# USER_FILTER = "(|(uid={{ login }})(mail={{ login }}))"
# Where to search for groups?
# GROUP_BASE = "ou=groups,dc=mydomain,dc=tld"
# The object class to use for creating new groups
# GROUP_CLASS = "groupOfNames"
# The attribute to identify an object in the User dn.
# GROUP_RDN = "cn"
# The attribute to use to identify a group
# GROUP_NAME_ATTRIBUTE = "cn"
[ACL]
# You can define access controls that define what users can do on canaille
# An access control consists in a FILTER to match users, a list of PERMISSIONS
# matched users will be able to perform, and fields users will be able
# to READ and WRITE. Users matching several filters will cumulate permissions.
#
# 'FILTER' parameter can be:
# - absent, in which case all the users will match this access control
# - a mapping where keys are user attributes name and the values those user
# attribute values. All the values must be matched for the user to be part
# of the access control.
# - a list of those mappings. If a user values match at least one mapping,
# then the user will be part of the access control
#
# Here are some examples
# FILTER = {user_name = 'admin'}
# FILTER =
# - {groups = 'admins'}
# - {groups = 'moderators'}
#
# The 'PERMISSIONS' parameter is a list of items the users in the access
# control will be able to manage. 'PERMISSIONS' is optional. Values can be:
# - "edit_self" to allow users to edit their own profile
# - "use_oidc" to allow OpenID Connect authentication
# - "manage_oidc" to allow OpenID Connect client managements
# - "manage_users" to allow other users management
# - "manage_groups" to allow group edition and creation
# - "delete_account" allows a user to delete his own account. If used with
# manage_users, the user can delete any account
# - "impersonate_users" to allow a user to take the identity of another user
#
# The 'READ' and 'WRITE' attributes are the LDAP attributes of the user
# object that users will be able to read and/or write.
[ACL.DEFAULT]
PERMISSIONS = ["edit_self", "use_oidc"]
READ = [
"user_name",
"groups",
"lock_date",
]
WRITE = [
"photo",
"given_name",
"family_name",
"display_name",
"password",
"phone_numbers",
"emails",
"profile_url",
"formatted_address",
"street",
"postal_code",
"locality",
"region",
"preferred_language",
"employee_number",
"department",
"title",
"organization",
]
[ACL.ADMIN]
FILTER = {groups = "admins"}
PERMISSIONS = [
"manage_users",
"manage_groups",
"manage_oidc",
"delete_account",
"impersonate_users",
]
WRITE = [
"groups",
"lock_date",
]
[ACL.HALF_ADMIN]
FILTER = {groups = "moderators"}
PERMISSIONS = ["manage_users", "manage_groups", "delete_account"]
WRITE = ["groups"]
[OIDC]
# Whether a token is needed for the RFC7591 dynamic client registration.
# If true, no token is needed to register a client.
# If false, dynamic client registration needs a token defined
# in DYNAMIC_CLIENT_REGISTRATION_TOKENS
DYNAMIC_CLIENT_REGISTRATION_OPEN = true
# A list of tokens that can be used for dynamic client registration
DYNAMIC_CLIENT_REGISTRATION_TOKENS = [
"xxxxxxx-yyyyyyy-zzzzzz",
]
# REQUIRE_NONCE force the nonce exchange during the authentication flows.
# This adds security but may not be supported by all clients.
# REQUIRE_NONCE = true
[OIDC.JWT]
# PRIVATE_KEY_FILE and PUBLIC_KEY_FILE are the paths to the private and
# the public key. You can generate a RSA keypair with:
# openssl genrsa -out private.pem 4096
# openssl rsa -in private.pem -pubout -outform PEM -out public.pem
# If the variables are unset, and debug mode is enabled,
# an in-memory keypair will be used.
# PRIVATE_KEY_FILE = "/path/to/private.pem"
# PUBLIC_KEY_FILE = "/path/to/public.pem"
# The URI of the identity provider
# ISS = "https://auth.mydomain.tld"
# The key type parameter
# KTY = "RSA"
# The key algorithm
# ALG = "RS256"
# The time the JWT will be valid, in seconds
# EXP = 3600
[OIDC.JWT.MAPPING]
# Mapping between JWT fields and LDAP attributes from your
# User objectClass.
# {attribute} will be replaced by the user ldap attribute value.
# Default values fit inetOrgPerson.
# SUB = "{{ user.user_name }}"
# NAME = "{{ user.formatted_name }}"
# PHONE_NUMBER = "{{ user.phone_numbers[0] }}"
# EMAIL = "{{ user.preferred_email }}"
# GIVEN_NAME = "{{ user.given_name }}"
# FAMILY_NAME = "{{ user.family_name }}"
# PREFERRED_USERNAME = "{{ user.display_name }}"
# LOCALE = "{{ user.preferred_language }}"
# ADDRESS = "{{ user.formatted_address }}"
# PICTURE = "{% if user.photo %}{{ url_for('core.account.photo', user=user, field='photo', _external=True) }}{% endif %}"
# WEBSITE = "{{ user.profile_url }}"
# The SMTP server options. If not set, mail related features such as
# user invitations, and password reset emails, will be disabled.
[SMTP]
# HOST = "localhost"
# PORT = 25
# TLS = false
# SSL = false
# LOGIN = ""
# PASSWORD = ""
# FROM_ADDR = "admin@mydomain.tld"
# The registration options. If not set, registration will be disabled. Requires SMTP to work.
# Groups should be formatted like this: ["<GROUP_NAME_ATTRIBUTE>=group_name,<GROUP_BASE>", ...]
# [REGISTRATION]
# GROUPS=[]
# CAN_EDIT_USERNAME = false
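
The ``FILTER`` semantics documented in the ``[ACL]`` section above mirror ``User.match_filter()`` from the SQL models: a mapping must match all of its attributes, and a list of mappings matches if any single mapping does. A simplified pure-Python sketch (the ``user`` dict is hypothetical, and list-valued attributes are reduced to plain equality here):

.. code-block:: python

    # Simplified sketch of the ACL FILTER matching rule described above.
    def match_filter(user, filter):
        if filter is None:
            return True
        if isinstance(filter, dict):
            # a mapping: every attribute/value pair must match
            return all(user.get(attribute) == value for attribute, value in filter.items())
        # a list of mappings: any single mapping may match
        return any(match_filter(user, subfilter) for subfilter in filter)

    user = {"user_name": "admin", "groups": "admins"}
    print(match_filter(user, {"user_name": "admin"}))                            # True
    print(match_filter(user, [{"groups": "admins"}, {"groups": "moderators"}]))  # True
    print(match_filter(user, {"user_name": "admin", "groups": "moderators"}))    # False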


@@ -78,6 +78,11 @@ LEVEL = "DEBUG"
 # written in the standard error output.
 # PATH = ""
+# [BACKENDS.SQL]
+# The SQL database connection string
+# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
+# SQL_DATABASE_URI = "postgresql://user:password@localhost/database"
 [BACKENDS.LDAP]
 URI = "ldap://127.0.0.1:5389"
 ROOT_DN = "dc=mydomain,dc=tld"


@@ -78,6 +78,11 @@ LEVEL = "DEBUG"
 # written in the standard error output.
 # PATH = ""
+# [BACKENDS.SQL]
+# The SQL database connection string
+# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
+# SQL_DATABASE_URI = "postgresql://user:password@localhost/database"
 # [BACKENDS.LDAP]
 # URI = "ldap://localhost"
 # ROOT_DN = "dc=mydomain,dc=tld"

demo/conf/canaille-sql.toml Normal file

@@ -0,0 +1,263 @@
# All the Flask configuration values can be used:
# https://flask.palletsprojects.com/en/2.3.x/config/#builtin-configuration-values
# The flask secret key for cookies. You MUST change this.
SECRET_KEY = "change me before you go in production"
# Your organization name.
# NAME = "Canaille"
# The interface on which canaille will be served
# SERVER_NAME = "auth.mydomain.tld"
# PREFERRED_URL_SCHEME = "https"
# You can display a logo to be recognized on login screens
LOGO = "/static/img/canaille-head.png"
# Your favicon. If unset the LOGO will be used.
FAVICON = "/static/img/canaille-c.png"
# The name of a theme in the 'theme' directory, or an absolute path
# to a theme. Defaults to 'default'. Theming is done with
# https://github.com/tktech/flask-themer
# THEME = "default"
# If unset, language is detected
# LANGUAGE = "en"
# The timezone in which datetimes will be displayed to the users.
# If unset, the server timezone will be used.
# TIMEZONE = UTC
# If you have a sentry instance, you can set its dsn here:
# SENTRY_DSN = "https://examplePublicKey@o0.ingest.sentry.io/0"
# Enables javascript to smooth the user experience
# JAVASCRIPT = true
# Accelerates webpages with async requests
# HTMX = true
# If EMAIL_CONFIRMATION is set to true, users will need to click on a
# confirmation link sent by email when they want to add a new email.
# By default, this is true if SMTP is configured, else this is false.
# If explicitly set to true and SMTP is disabled, the email field
# will be read-only.
# EMAIL_CONFIRMATION =
# If ENABLE_REGISTRATION is true, then users can freely create an account
# at this instance. If email verification is available, users must confirm
# their email before the account is created.
ENABLE_REGISTRATION = true
# If HIDE_INVALID_LOGINS is set to true (the default), when a user
# tries to sign in with an invalid login, a message is shown indicating
# that the password is wrong, but does not give a clue whether the login
# exists or not.
# If HIDE_INVALID_LOGINS is set to false, when a user tries to sign in with
# an invalid login, a message is shown indicating that the login does not
# exist.
# HIDE_INVALID_LOGINS = true
# If ENABLE_PASSWORD_RECOVERY is false, then users cannot ask for a password
# recovery link by email. This option is true by default.
# ENABLE_PASSWORD_RECOVERY = true
# The validity duration of registration invitations, in seconds.
# Defaults to 2 days
# INVITATION_EXPIRATION = 172800
[LOGGING]
# LEVEL can be one value among:
# DEBUG, INFO, WARNING, ERROR, CRITICAL
# Defaults to WARNING
# LEVEL = "WARNING"
LEVEL = "DEBUG"
# The path of the log file. If not set (the default) logs are
# written in the standard error output.
# PATH = ""
[BACKENDS.SQL]
# The SQL database connection string
# Details on https://docs.sqlalchemy.org/en/20/core/engines.html
SQL_DATABASE_URI = "sqlite:///demo.sqlite"
# [BACKENDS.LDAP]
# URI = "ldap://localhost"
# ROOT_DN = "dc=mydomain,dc=tld"
# BIND_DN = "cn=admin,dc=mydomain,dc=tld"
# BIND_PW = "admin"
# TIMEOUT = 10
# Where to search for users?
# USER_BASE = "ou=users,dc=mydomain,dc=tld"
# The object class to use for creating new users
# USER_CLASS = "inetOrgPerson"
# The attribute to identify an object in the User dn.
# USER_RDN = "uid"
# Filter to match users on sign in. Jinja syntax is supported
# and a `login` variable is available containing the value
# passed in the login field.
# USER_FILTER = "(|(uid={{ login }})(mail={{ login }}))"
# Where to search for groups?
# GROUP_BASE = "ou=groups,dc=mydomain,dc=tld"
# The object class to use for creating new groups
# GROUP_CLASS = "groupOfNames"
# The attribute to identify an object in the User dn.
# GROUP_RDN = "cn"
# The attribute to use to identify a group
# GROUP_NAME_ATTRIBUTE = "cn"
[ACL]
# You can define access controls that define what users can do on canaille
# An access control consists in a FILTER to match users, a list of PERMISSIONS
# matched users will be able to perform, and fields users will be able
# to READ and WRITE. Users matching several filters will cumulate permissions.
#
# 'FILTER' parameter can be:
# - absent, in which case all the users will match this access control
# - a mapping where keys are user attributes name and the values those user
# attribute values. All the values must be matched for the user to be part
# of the access control.
# - a list of those mappings. If a user values match at least one mapping,
# then the user will be part of the access control
#
# Here are some examples
# FILTER = {user_name = 'admin'}
# FILTER =
# - {groups = 'admins'}
# - {groups = 'moderators'}
#
# The 'PERMISSIONS' parameter is a list of items the users in the access
# control will be able to manage. 'PERMISSIONS' is optional. Values can be:
# - "edit_self" to allow users to edit their own profile
# - "use_oidc" to allow OpenID Connect authentication
# - "manage_oidc" to allow OpenID Connect client managements
# - "manage_users" to allow other users management
# - "manage_groups" to allow group edition and creation
# - "delete_account" allows a user to delete his own account. If used with
# manage_users, the user can delete any account
# - "impersonate_users" to allow a user to take the identity of another user
#
# The 'READ' and 'WRITE' attributes are the LDAP attributes of the user
# object that users will be able to read and/or write.
[ACL.DEFAULT]
PERMISSIONS = ["edit_self", "use_oidc"]
READ = [
"user_name",
"groups",
"lock_date",
]
WRITE = [
"photo",
"given_name",
"family_name",
"display_name",
"password",
"phone_numbers",
"emails",
"profile_url",
"formatted_address",
"street",
"postal_code",
"locality",
"region",
"preferred_language",
"employee_number",
"department",
"title",
"organization",
]
[ACL.ADMIN]
FILTER = {groups = "admins"}
PERMISSIONS = [
"manage_users",
"manage_groups",
"manage_oidc",
"delete_account",
"impersonate_users",
]
WRITE = [
"groups",
"lock_date",
]
[ACL.HALF_ADMIN]
FILTER = {groups = "moderators"}
PERMISSIONS = ["manage_users", "manage_groups", "delete_account"]
WRITE = ["groups"]
# The jwt configuration. You can generate a RSA keypair with:
# openssl genrsa -out private.pem 4096
# openssl rsa -in private.pem -pubout -outform PEM -out public.pem
[OIDC]
# Whether a token is needed for the RFC7591 dynamic client registration.
# If true, no token is needed to register a client.
# If false, dynamic client registration needs a token defined
# in DYNAMIC_CLIENT_REGISTRATION_TOKENS
DYNAMIC_CLIENT_REGISTRATION_OPEN = true
# A list of tokens that can be used for dynamic client registration
DYNAMIC_CLIENT_REGISTRATION_TOKENS = [
"xxxxxxx-yyyyyyy-zzzzzz",
]
# REQUIRE_NONCE force the nonce exchange during the authentication flows.
# This adds security but may not be supported by all clients.
# REQUIRE_NONCE = true
[OIDC.JWT]
# PRIVATE_KEY_FILE and PUBLIC_KEY_FILE are the paths to the private and
# the public key. You can generate a RSA keypair with:
# openssl genrsa -out private.pem 4096
# openssl rsa -in private.pem -pubout -outform PEM -out public.pem
# If the variables are unset, and debug mode is enabled,
# an in-memory keypair will be used.
# PRIVATE_KEY_FILE = "/path/to/private.pem"
# PUBLIC_KEY_FILE = "/path/to/public.pem"
# The URI of the identity provider
# ISS = "https://auth.mydomain.tld"
# The key type parameter
# KTY = "RSA"
# The key algorithm
# ALG = "RS256"
# The time the JWT will be valid, in seconds
# EXP = 3600
[OIDC.JWT.MAPPING]
# Mapping between JWT fields and LDAP attributes from your
# User objectClass.
# {attribute} will be replaced by the user ldap attribute value.
# Default values fit inetOrgPerson.
# SUB = "{{ user.user_name }}"
# NAME = "{{ user.formatted_name }}"
# PHONE_NUMBER = "{{ user.phone_numbers[0] }}"
# EMAIL = "{{ user.preferred_email }}"
# GIVEN_NAME = "{{ user.given_name }}"
# FAMILY_NAME = "{{ user.family_name }}"
# PREFERRED_USERNAME = "{{ user.display_name }}"
# LOCALE = "{{ user.preferred_language }}"
# ADDRESS = "{{ user.formatted_address }}"
# PICTURE = "{% if user.photo %}{{ url_for('core.account.photo', user=user, field='photo', _external=True) }}{% endif %}"
# WEBSITE = "{{ user.profile_url }}"
# The SMTP server options. If not set, mail related features such as
# user invitations, and password reset emails, will be disabled.
[SMTP]
# HOST = "localhost"
# PORT = 25
# TLS = false
# SSL = false
# LOGIN = ""
# PASSWORD = ""
# FROM_ADDR = "admin@mydomain.tld"


@@ -0,0 +1,54 @@
---
version: "3"
services:
canaille:
build:
context: ..
dockerfile: demo/Dockerfile-canaille
environment:
- AUTHLIB_INSECURE_TRANSPORT=1
- FLASK_DEBUG=1
- CONFIG=/opt/canaille/conf/canaille-sql.toml
- FLASK_APP=demoapp
volumes:
- ../canaille:/opt/canaille/canaille
- ./conf-docker:/opt/canaille/conf
ports:
- 5000:5000
client1:
depends_on:
- canaille
build:
context: .
dockerfile: Dockerfile-client
environment:
- FLASK_DEBUG=1
- CONFIG=/opt/client/conf/client1.cfg
- FLASK_APP=client
volumes:
- ./client:/opt/client/client
- ./conf-docker:/opt/client/conf
- ../canaille/static:/opt/canaille/static
command: --port=5001
ports:
- 5001:5001
client2:
depends_on:
- canaille
build:
context: .
dockerfile: Dockerfile-client
environment:
- FLASK_DEBUG=1
- CONFIG=/opt/client/conf/client2.cfg
- FLASK_APP=client
volumes:
- ./client:/opt/client/client
- ./conf-docker:/opt/client/conf
- ../canaille/static:/opt/canaille/static
command: --port=5002
ports:
- 5002:5002


@@ -1 +1 @@
-docker-compose-memory.yml
+docker-compose-sql.yml


@@ -4,7 +4,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 if [ "$1" = "--backend" -a -n "$2" ]; then
     BACKEND="$2"
 else
-    BACKEND="memory"
+    BACKEND="sql"
 fi
 if ! type python > /dev/null 2>&1 && ! type python3 > /dev/null 2>&1; then
@@ -27,6 +27,11 @@ if [ "$BACKEND" = "memory" ]; then
     poetry install --with demo --without dev --extras front --extras oidc
     env poetry run honcho --procfile Procfile-memory start
+elif [ "$BACKEND" = "sql" ]; then
+    poetry install --with demo --without dev --extras front --extras oidc --extras sql
+    env poetry run honcho --procfile Procfile-sql start
 elif [ "$BACKEND" = "ldap" ]; then
     if ! type slapd > /dev/null 2>&1; then
@@ -40,7 +45,7 @@ elif [ "$BACKEND" = "ldap" ]; then
 else
-    echo "Usage: run.sh --backend [memory|ldap]"
+    echo "Usage: run.sh --backend [sql|memory|ldap]"
 fi


@@ -4,6 +4,8 @@ Backends
 .. contents::
    :local:
+Canaille can read and save data in different databases:
 Memory
 ======
@@ -12,9 +14,25 @@ This backend is only for test purpose and should not be used in production envir
 It is used when the ``BACKENDS`` configuration parameter is unset or empty.
+SQL
+===
+Canaille can use any database supported by `SQLAlchemy <https://www.sqlalchemy.org/>`_, such as
+sqlite, postgresql or mariadb.
+It is used when the ``BACKENDS.SQL`` configuration parameter is defined.
 LDAP
 ====
+Canaille can use OpenLDAP as its main database.
+It is used when the ``BACKENDS.LDAP`` configuration parameter is defined.
+.. note ::
+    Currently, only the ``inetOrgPerson`` and ``groupOfNames`` schemas have been tested.
+    If you want to use different schemas or LDAP servers, adaptations may be needed.
+    Patches are welcome.
 Canaille can integrate with several OpenLDAP overlays:
 memberof / refint


@@ -90,6 +90,13 @@ LOGGING
 :PATH:
     *Optional.* The log file path. If not set, logs are written in the standard error output.
+BACKENDS.SQL
+------------
+:SQL_DATABASE_URI:
+    **Required.** The SQL database connection string, as defined in
+    `SQLAlchemy documentation <https://docs.sqlalchemy.org/en/20/core/engines.html>`_.
 BACKENDS.LDAP
 -------------
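
A few illustrative values for ``SQL_DATABASE_URI`` (any SQLAlchemy connection string is accepted; the PostgreSQL and MariaDB examples assume the psycopg2 and PyMySQL drivers respectively):

.. code-block:: python

    # Illustrative connection strings only; the driver choices are assumptions.
    from sqlalchemy import create_engine

    engine = create_engine("sqlite:///canaille.sqlite")  # file-based SQLite, no extra driver needed
    # create_engine("sqlite:///:memory:")
    # create_engine("postgresql+psycopg2://user:password@localhost/canaille")
    # create_engine("mariadb+pymysql://user:password@localhost/canaille")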

poetry.lock generated

@@ -669,6 +669,76 @@ files = [
[package.dependencies]
python-dateutil = ">=2.7"
[[package]]
name = "greenlet"
version = "3.0.1"
description = "Lightweight in-process concurrent programming"
optional = true
python-versions = ">=3.7"
files = [
{file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"},
{file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"},
{file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"},
{file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"},
{file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"},
{file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"},
{file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"},
{file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"},
{file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"},
{file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"},
{file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"},
{file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"},
{file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"},
{file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"},
{file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"},
{file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"},
{file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"},
{file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"},
{file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"},
{file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"},
{file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"},
{file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"},
{file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"},
{file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"},
{file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"},
{file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"},
{file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"},
{file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"},
{file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"},
{file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"},
{file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"},
{file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"},
{file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"},
{file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"},
{file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"},
{file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"},
{file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"},
{file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"},
{file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"},
{file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"},
{file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"},
{file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"},
{file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"},
{file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"},
{file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"},
{file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"},
{file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"},
{file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"},
{file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"},
{file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"},
{file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"},
{file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"},
{file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"},
{file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"},
{file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"},
{file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"},
{file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"},
]
[package.extras]
docs = ["Sphinx"]
test = ["objgraph", "psutil"]
[[package]]
name = "honcho"
version = "1.1.0"
@@ -1712,6 +1782,110 @@ files = [
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]
[[package]]
name = "sqlalchemy"
version = "2.0.23"
description = "Database Abstraction Library"
optional = true
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"},
{file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"},
{file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"},
{file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"},
{file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"},
{file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"},
{file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"},
{file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"},
{file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"},
]

[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
typing-extensions = ">=4.2.0"

[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx-oracle (>=8)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]

[[package]]
name = "sqlalchemy-json"
version = "0.7.0"
description = "JSON type with nested change tracking for SQLAlchemy"
optional = true
python-versions = ">= 3.6"
files = [
{file = "sqlalchemy-json-0.7.0.tar.gz", hash = "sha256:620d0b26f648f21a8fa9127df66f55f83a5ab4ae010e5397a5c6989a08238561"},
{file = "sqlalchemy_json-0.7.0-py3-none-any.whl", hash = "sha256:27881d662ca18363a4ac28175cc47ea2a6f2bef997ae1159c151026b741818e6"},
]

[package.dependencies]
sqlalchemy = ">=0.7"

[package.extras]
dev = ["pytest"]

[[package]]
name = "toml"
version = "0.10.2"
@ -1881,13 +2055,14 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]

[extras]
all = ["authlib", "email_validator", "flask-babel", "flask-themer", "pycountry", "python-ldap", "pytz", "sentry-sdk", "sqlalchemy", "sqlalchemy-json", "toml"]
front = ["email_validator", "flask-babel", "flask-themer", "pycountry", "pytz", "toml"]
ldap = ["python-ldap"]
oidc = ["authlib"]
sentry = ["sentry-sdk"]
sql = ["sqlalchemy", "sqlalchemy-json"]

[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "b3561cb0465972869503d18840bb7a3da62a75c447b827b0db79183f49c4e31f"
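
The lock file now carries SQLAlchemy 2.0.23 and sqlalchemy-json 0.7.0 behind the new ``sql`` extra. As a rough illustration of what sqlalchemy-json adds (nested change tracking for JSON columns), here is a minimal, self-contained sketch; the ``Client`` model and its fields are invented for the example and are not Canaille's actual SQL models.

.. code-block:: python

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base
    from sqlalchemy_json import NestedMutableJson

    Base = declarative_base()


    class Client(Base):
        """Illustrative model only, not Canaille's actual SQL mapping."""

        __tablename__ = "client"

        id = Column(Integer, primary_key=True)
        # NestedMutableJson tracks in-place edits inside the stored JSON document.
        grant_metadata = Column(NestedMutableJson, default=dict)


    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        client = Client(grant_metadata={"grant_types": ["authorization_code"]})
        session.add(client)
        session.commit()

        # With a plain JSON column this in-place mutation would be lost;
        # sqlalchemy-json flags the attribute as dirty so the commit persists it.
        client.grant_metadata["grant_types"].append("refresh_token")
        session.commit()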

View file

@ -60,6 +60,10 @@ python-ldap = {version = "^3.4.0", optional=true}
# extra : sentry
sentry-sdk = {version = "<2", optional=true, extras=["flask"]}

# extra : sql
sqlalchemy-json = {version = "^0.7.0", optional=true}
sqlalchemy = {version = "^2.0.23", optional=true}

[tool.poetry.group.doc]
optional = true
@ -112,6 +116,10 @@ oidc = [
sentry = [
    "sentry-sdk",
]
sql = [
    "sqlalchemy",
    "sqlalchemy-json",
]
all = [
    "click",
    "email_validator",
@ -123,6 +131,8 @@ all = [
"python-ldap", "python-ldap",
"authlib", "authlib",
"sentry-sdk", "sentry-sdk",
"sqlalchemy",
"sqlalchemy-json",
] ]
[tool.poetry.scripts] [tool.poetry.scripts]
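
Since sqlalchemy and sqlalchemy-json are declared optional and only pulled in by the ``sql`` extra, code that needs them has to handle their absence. Below is a minimal sketch of such a guard, assuming nothing about Canaille's actual implementation (the function name and error message are illustrative).

.. code-block:: python

    def import_sql_dependencies():
        """Fail with an actionable message when the 'sql' extra is not installed."""
        try:
            import sqlalchemy
            import sqlalchemy_json  # noqa: F401  (mutable JSON column support)
        except ImportError as exc:
            raise RuntimeError(
                "The SQL backend requires the 'sql' extra, "
                "e.g. pip install 'canaille[sql]'"
            ) from exc
        return sqlalchemy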

View file

View file

@ -0,0 +1,18 @@
import pytest

from canaille.backends.sql.backend import Backend


@pytest.fixture
def sqlalchemy_configuration(configuration):
    # Point the SQL backend at a throwaway in-memory SQLite database.
    configuration["BACKENDS"] = {
        "SQL": {"SQL_DATABASE_URI": "sqlite:///:memory:"},
    }
    yield configuration
    del configuration["BACKENDS"]


@pytest.fixture
def sql_backend(sqlalchemy_configuration):
    # Keep a backend session open (and the schema initialized) for the whole test.
    backend = Backend(sqlalchemy_configuration)
    with backend.session(init=True):
        yield backend
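
The ``sql_backend`` fixture only relies on ``Backend`` accepting the configuration mapping and exposing a ``session()`` context manager that can initialize the schema. The sketch below illustrates that contract under those assumptions; it is not the actual ``canaille.backends.sql.backend`` code, and the local ``Base`` stands in for whatever declarative base the real models share.

.. code-block:: python

    from contextlib import contextmanager

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session, declarative_base

    # Stand-in for the declarative base shared by the real SQL models.
    Base = declarative_base()


    class Backend:
        """Sketch of the interface exercised by the fixtures, not the real class."""

        def __init__(self, config):
            # The fixture above provides BACKENDS.SQL.SQL_DATABASE_URI in the test configuration.
            self.engine = create_engine(config["BACKENDS"]["SQL"]["SQL_DATABASE_URI"])

        @contextmanager
        def session(self, init=False):
            if init:
                # Create the tables for every model registered on the base.
                Base.metadata.create_all(self.engine)
            with Session(self.engine) as session:
                yield session

With such an object, ``with backend.session(init=True):`` gives each test a fresh in-memory schema, which is why the fixture keeps that context manager open while it yields.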

View file

@ -213,6 +213,9 @@ def test_model_references(testclient, user, foo_group, admin, bar_group, backend
def test_model_references_set_unsaved_object(
    testclient, logged_moderator, user, backend
):
    if "sql" in backend.__class__.__module__:
        pytest.skip()

    group = models.Group(members=[user], display_name="foo")
    group.save()
    user.reload()  # LDAP groups can be inconsistent by containing members that don't exist