Initial commit

Oliver Traber 2023-09-21 14:56:01 +02:00
commit b9d5c06956
Signed by: Bluemedia
GPG key ID: C0674B105057136C
55 changed files with 4706 additions and 0 deletions

backend/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
.venv
**/__pycache__

backend/alembic.ini Normal file
@@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
timezone = UTC
# max length of characters to apply to the
# "slug" field
truncate_slug_length = 20
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///./scanner.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

backend/alembic/README Normal file
@@ -0,0 +1 @@
Generic single-database configuration.

backend/alembic/env.py Normal file
@@ -0,0 +1,79 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from app.data import models
target_metadata = models.Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
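
A note on usage: with script_location = alembic and sqlalchemy.url pointing at scanner.db, migrations would typically be generated and applied from the backend/ directory with the Alembic CLI (alembic revision --autogenerate -m "...", then alembic upgrade head). A minimal programmatic sketch of the upgrade step, assuming it runs from backend/ so that alembic.ini resolves:

# sketch: apply all pending migrations, equivalent to `alembic upgrade head`
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")  # path is relative to the working directory
command.upgrade(alembic_cfg, "head")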

backend/alembic/script.py.mako Normal file
@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

backend/app/__init__.py Normal file (empty)

backend/app/data/database.py Normal file
@@ -0,0 +1,12 @@
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

SQLALCHEMY_DATABASE_URL = "sqlite:///./scanner.db"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()

backend/app/data/models.py Normal file
@@ -0,0 +1 @@
from .database import Base

backend/app/data/schemas.py Normal file
@@ -0,0 +1,13 @@
from pydantic import BaseModel
import app.scanner.enums as scan

class ScanPage(BaseModel):
    filename: str
    size_bytes: int

    class Config():
        orm_mode = True

class ScanStatus(BaseModel):
    pages: list[ScanPage]
    status: scan.Status

backend/app/main.py Normal file
@@ -0,0 +1,43 @@
import threading
from contextlib import asynccontextmanager
from typing import Annotated

from fastapi import FastAPI, Depends

from app.data import models
from app.data.database import SessionLocal, engine
from app.scanner.scanner import Scanner
from app.scanner.scanner import Status as ScannerStatus

models.Base.metadata.create_all(bind=engine)

__scanner = Scanner("/var/www/html/img")

@asynccontextmanager
async def __lifespan(app: FastAPI):
    threading.Thread(target=__scanner.preload).start()
    yield

app = FastAPI(lifespan=__lifespan)

# SQLAlchemy session dependency
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

def get_scanner():
    return __scanner
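
# Routers are imported after get_scanner is defined because app.routers.scan
# imports get_scanner from app.main; importing them at the top would be a circular import.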
from app.routers import power
app.include_router(power.router)

from app.routers import scan
app.include_router(scan.router)

@app.get("/api/ready")
async def __ready(scanner: Annotated[Scanner, Depends(get_scanner)]):
    return scanner.get_status() != ScannerStatus.INITIALIZED
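
For local development the API would typically be served with Uvicorn (uvicorn app.main:app from the backend/ directory); a minimal programmatic sketch, with host and port chosen here as placeholder assumptions:

# sketch: run the FastAPI app with uvicorn (host/port are assumptions)
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000)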

backend/app/routers/power.py Normal file
@@ -0,0 +1,14 @@
from fastapi import APIRouter
import subprocess

router = APIRouter(prefix="/api/power")

@router.post("/shutdown")
async def power_shutdown():
    subprocess.call(["sudo", "shutdown", "-h", "now"])
    return {}

@router.post("/restart")
async def power_restart():
    subprocess.call(["sudo", "reboot"])
    return {}
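
These endpoints shell out to sudo, so they only work if the user running the backend may execute shutdown and reboot without a password, e.g. via a sudoers entry along the lines of "<user> ALL=(ALL) NOPASSWD: /usr/sbin/shutdown, /usr/sbin/reboot" (the user name and binary paths are assumptions and depend on the target system).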

backend/app/routers/scan.py Normal file
@@ -0,0 +1,23 @@
from typing import Annotated

from app.scanner.scanner import Scanner
from app.main import get_scanner
from fastapi import APIRouter, Depends

from app.data import schemas, models

router = APIRouter(prefix="/api/scan")

@router.post("")
async def scan(scanner: Annotated[Scanner, Depends(get_scanner)]):
    scanner.scan()
    return []

@router.get("/status", response_model=schemas.ScanStatus)
async def status(scanner: Annotated[Scanner, Depends(get_scanner)]):
    pages = [schemas.ScanPage.from_orm(page) for page in scanner.get_pages()]
    return schemas.ScanStatus(pages=pages, status=scanner.get_status())

@router.get("/debug")
async def debug(scanner: Annotated[Scanner, Depends(get_scanner)]):
    return scanner.get_options()
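
Taken together, a scan is started with POST /api/scan and progress is polled via GET /api/scan/status, which serializes schemas.ScanStatus. A minimal client sketch using only the standard library, assuming the backend listens on localhost:8000:

# sketch: trigger a scan and poll its status (base URL is an assumption)
import json
import urllib.request

base = "http://localhost:8000"
urllib.request.urlopen(urllib.request.Request(f"{base}/api/scan", method="POST"))

with urllib.request.urlopen(f"{base}/api/scan/status") as resp:
    status = json.loads(resp.read())
# e.g. {"pages": [{"filename": "out0.png", "size_bytes": 123456}], "status": "running"}
print(status)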

backend/app/scanner/enums.py Normal file
@@ -0,0 +1,22 @@
from enum import Enum

class Status(Enum):
    INITIALIZED = "initialized"
    IDLE = "idle"
    RUNNING = "running"
    DONE = "done"
    ERR_NO_PAPER = "err_no_paper"
    ERR_COVER_OPEN = "err_cover_open"

class Setting(Enum):
    PAPER_SOURCE = "source"
    COLOR_MODE = "color"
    RESOLUTION = "resolution"
    PAPER_SIZE = "paper_size"

class PaperSize(Enum):
    A3 = "a3"
    B3 = "b3"
    A4 = "a4"
    B4 = "b4"
    LETTER = "letter"

backend/app/scanner/scanner.py Normal file
@@ -0,0 +1,140 @@
import gi, os, threading
from typing import List

from PIL import Image

from app.scanner.enums import Status

gi.require_version('Libinsane', '1.0')
from gi.repository import Libinsane, GObject # type: ignore

class __LibinsaneSilentLogger(GObject.GObject, Libinsane.Logger):
    def do_log(self, lvl, msg):
        return

Libinsane.register_logger(__LibinsaneSilentLogger())

class Page:
    filename: str
    size_bytes: int

class Scanner:

    def __get_device_id(self):
        """
        List local scanners and get the device id of the first found device.

        :param self: Instance of this class
        :returns: Device id of the first scan device
        """
        devs = self.api.list_devices(Libinsane.DeviceLocations.LOCAL_ONLY)
        return devs[0].get_dev_id()

    def __raw_to_img(self, params, img_bytes):
        """
        Convert a raw RGB24 byte buffer returned by Libinsane into a PIL image.
        """
        fmt = params.get_format()
        assert(fmt == Libinsane.ImgFormat.RAW_RGB_24)
        (w, h) = (
            params.get_width(),
            int(len(img_bytes) / 3 / params.get_width())
        )
        return Image.frombuffer("RGB", (w, h), img_bytes, "raw", "RGB", 0, 1)

    def __write_file(self, scan_params, data, page_index, last_file):
        data = b"".join(data)
        if scan_params.get_format() == Libinsane.ImgFormat.RAW_RGB_24:
            filesize = len(data)
            img = self.__raw_to_img(scan_params, data)
            filename = f"out{page_index}.png"
            img.save(os.path.join(self.storage_path, filename), format="PNG")
            page = Page()
            page.filename = filename
            page.size_bytes = filesize
            self.scanned_pages.append(page)
        if last_file:
            self.status = Status.DONE

    def __set_defaults(self):
        dev = self.api.get_device(self.device_id)
        opts = dev.get_options()
        opts = {opt.get_name(): opt for opt in opts}
        opts["sleeptimer"].set_value(1)
        opts["resolution"].set_value(200)
        dev.close()

    def __scan(self):
        self.status = Status.RUNNING
        source = self.api.get_device(self.device_id)
        opts = source.get_options()
        opts = {opt.get_name(): opt for opt in opts}
        if opts["cover-open"].get_value() == True:
            self.status = Status.ERR_COVER_OPEN
            return
        session = source.scan_start()
        try:
            page_index = 0
            while not session.end_of_feed() and page_index < 50:
                # Do not assume that all the pages will have the same size!
                scan_params = session.get_scan_parameters()
                img = []
                while not session.end_of_page():
                    data = session.read_bytes(256 * 1024)
                    data = data.get_data()
                    img.append(data)
                t = threading.Thread(target=self.__write_file, args=(scan_params, img, page_index, session.end_of_feed()))
                t.start()
                page_index += 1
            if page_index == 0:
                self.status = Status.ERR_NO_PAPER
        finally:
            session.cancel()
            source.close()

    def __init__(self, storage_path):
        self.scanned_pages: List[Page] = []
        self.storage_path = storage_path
        self.status = Status.INITIALIZED

    def preload(self):
        os.environ["LIBINSANE_NORMALIZER_SAFE_DEFAULTS"] = "0"
        self.api = Libinsane.Api.new_safebet()
        self.device_id = self.__get_device_id()
        self.__set_defaults()
        self.status = Status.IDLE

    def scan(self):
        if self.status == Status.RUNNING:
            raise RuntimeError("already_running")
        if self.status == Status.INITIALIZED:
            self.preload()
        self.scanned_pages: List[Page] = []
        t = threading.Thread(target=self.__scan)
        t.start()

    def get_status(self) -> Status:
        return self.status

    def get_pages(self) -> List[Page]:
        return self.scanned_pages

    def get_options(self):
        dev = self.api.get_device(self.device_id)
        opts = dev.get_options()
        result = {}
        for opt in opts:
            try:
                result[opt.get_name()] = opt.get_value()
            except Exception:
                continue
        dev.close()
        return result

    def cleanup(self):
        if self.status == Status.RUNNING:
            raise RuntimeError("scan_running")
        if self.status != Status.INITIALIZED:
            self.api.cleanup()
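
A minimal sketch of driving Scanner directly, mirroring how app.main uses it (the storage path here is an assumption; preload() blocks while the first local device is opened):

# sketch: scan a stack of pages and wait for a terminal status
import time
from app.scanner.scanner import Scanner
from app.scanner.enums import Status

scanner = Scanner("/tmp/scans")   # directory must exist and be writable
scanner.preload()                 # open the first local device and set defaults
scanner.scan()                    # runs in a background thread

while scanner.get_status() not in (Status.DONE, Status.ERR_NO_PAPER, Status.ERR_COVER_OPEN):
    time.sleep(1)
print(scanner.get_status(), [page.filename for page in scanner.get_pages()])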

backend/requirements.txt Normal file
@@ -0,0 +1,30 @@
alembic==1.12.0
annotated-types==0.5.0
anyio==3.7.1
click==8.1.7
fastapi==0.103.1
greenlet==2.0.2
gunicorn==21.2.0
h11==0.14.0
httptools==0.6.0
idna==3.4
Mako==1.2.4
MarkupSafe==2.1.3
packaging==23.1
Pillow==10.0.0
pycairo==1.24.0
pydantic==1.10.12
pydantic_core==2.6.3
PyGObject==3.44.1
python-dateutil==2.8.2
python-dotenv==1.0.0
PyYAML==6.0.1
six==1.16.0
sniffio==1.3.0
SQLAlchemy==2.0.20
starlette==0.27.0
typing_extensions==4.7.1
uvicorn==0.23.2
uvloop==0.17.0
watchfiles==0.20.0
websockets==11.0.3
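
The dependencies would typically be installed into the .venv directory ignored above, e.g. python -m venv .venv followed by .venv/bin/pip install -r requirements.txt; note that PyGObject and pycairo build against system GObject libraries, and the Libinsane GObject introspection bindings used by the scanner module must be installed separately on the host.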