Compare commits

..

13 Commits

Author SHA1 Message Date
Jordi Loyzaga c2f7e56d6a testing ci again
continuous-integration/drone/push Build is passing Details
2024-12-14 22:39:47 -06:00
Jordi Loyzaga 6adba29ba1 Made bulk delete also delete files
continuous-integration/drone/push Build is passing Details
Fixed bug on env config evaluation
Fixed oversight that always displayed browsable api
Fixed file naming bug in file deletion
2024-09-20 03:48:16 -06:00
Jordi Loyzaga e3a021c53f Re-stamped requirements, really gotta automate this
continuous-integration/drone/push Build is passing Details
2024-09-19 04:03:45 -06:00
Jordi Loyzaga a8fade699c Changed default storage path setting
continuous-integration/drone/push Build is failing Details
2024-09-19 04:01:57 -06:00
Jordi Loyzaga 3279d6c5dc Upload is now working
continuous-integration/drone/push Build is failing Details
Added file hash validation (client vs server)
Added mime guessing
Added upload checkpoints
Improved error handling
2024-09-19 03:54:52 -06:00
Jordi Loyzaga ea84012059 Styling (really gotta get a commit hook working for this)
continuous-integration/drone/push Build is failing Details
2024-09-18 20:16:01 -06:00
Jordi Loyzaga eeaa1805bf Got chunked uploading kinda working
continuous-integration/drone/push Build is failing Details
First pass of FE chunked uploading (theres a 1 byte less per chunk per file, no idea why)
More formatting!
2024-09-18 20:15:18 -06:00
Jordi Loyzaga cc46df81fe Even more style fixes :)
continuous-integration/drone/push Build is failing Details
2024-09-17 02:21:48 -06:00
Jordi Loyzaga c6d7566c72 Linter and format fixes
continuous-integration/drone/push Build is failing Details
2024-09-17 02:18:58 -06:00
Jordi Loyzaga ff2d88493b Migrated back to flake8
continuous-integration/drone/push Build is failing Details
2024-09-17 02:07:13 -06:00
Jordi Loyzaga 526e0e7ddc Moved to single file storage (appending chunks to existing file)
continuous-integration/drone/push Build is failing Details
Added file finalize actions
Added error handling for files
Moved everything to pathlib
Simplified models
Squashed all migrations to single operation
2024-09-17 01:52:09 -06:00
Jordi Loyzaga a58f593c07 Added pagination, simplified models, rebuild chunking view
continuous-integration/drone/push Build is failing Details
2024-09-16 05:27:20 -06:00
Jordi Loyzaga c91fa9bd7b WIP Actually make this thing 2024-09-15 04:07:11 -06:00
38 changed files with 800 additions and 544 deletions

1
.gitignore vendored
View File

@ -95,3 +95,4 @@ venv.bak/
lockbox/media
lockbox/staticfiles
TODO.txt
FILES

8
.pre-commit-config.yaml Normal file
View File

@ -0,0 +1,8 @@
repos:
- repo: local
hooks:
- id: flake8
name: flake8
entry: flake8
language: system
files: '\.py$'
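(Usage note, assuming pre-commit itself is available: the hook above is typically activated per clone with "pip install pre-commit" followed by "pre-commit install", after which flake8 runs against the staged .py files on every commit.)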

View File

@ -1,5 +1,5 @@
lint:
@ruff check $(shell git diff --diff-filter=ACM --name-only HEAD | grep '\.py$$' ) --config=./pyproject.toml
@flake8 $(shell git diff --diff-filter=ACM --name-only HEAD | grep '\.py$$' )
stampreqs:
poetry export --without-hashes --format=requirements.txt > requirements.txt

View File

@ -1,8 +1,6 @@
from django.apps import (
AppConfig,
)
from django.apps import AppConfig
class CommonConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'common'
default_auto_field = "django.db.models.BigAutoField"
name = "common"

View File

@ -1,24 +1,25 @@
import re
CONTENT_RANGE_HEADER = "HTTP_CONTENT_RANGE"
CONTENT_RANGE_HEADER_PATTERN = re.compile(r"^bytes (?P<start>\d+)-(?P<end>\d+)/(?P<total>\d+)$")
CONTENT_RANGE_HEADER_PATTERN = re.compile(r"^bytes (?P<start>\d+)-(?P<end>\d+)$")
class UPLOAD_STATUS_TYPES:
UPLOADING = "uploading"
COMPLETED = "completed"
ABANDONED = "abandoned"
PROCESSING = "processing"
ERROR = "error"
class UPLOAD_ERROR_CODES:
FILE_MISSING = "file_missing"
CHUNK_MISMATCH = "chunk_mismatch"
VERIFICATION_FAILED = "verification_failed"
# Config
CONFIG_KEYS = {
"EXPIRATION_DELTA_MINUTES": {
"description": "Date created + this delta at which file expires",
"verbose_name": "File expiration delta (minutes)",
"native_type": int,
"sensitive": False,
"default": 120,
},
"ABANDONED_DELTA_MINUTES": {
"description": "Date created + this delta at which a file is marked as abandoned",
"verbose_name": "Uncompleted file abandoned max age",
@ -38,14 +39,21 @@ CONFIG_KEYS = {
"verbose_name": "Max per chunk size in bytes",
"native_type": int,
"sensitive": False,
"default": 1024 * 1024 * 20, # 20 MB
"default": 1024 * 1024 * 2, # 2 MB
},
"MAX_FILE_BYTES": {
"description": "Max total file size in bytes",
"verbose_name": "Max upload size in bytes",
"native_type": int,
"sensitive": False,
"default": 1024 * 1024 * 200, # 200 MB
"default": 1024 * 1024 * 30, # 300 MB
},
"VERIFY_ENABLE": {
"description": "Verify uploaded file integrity(sha256)",
"verbose_name": "File integrity verification",
"native_type": bool,
"sensitive": False,
"default": True,
},
"ENABLE_BROWSABLE_API": {
"description": "REST Framework browsable API is enabled (Always enabled if DEBUG is true)",
@ -82,4 +90,11 @@ CONFIG_KEYS = {
"sensitive": False,
"default": ".",
},
"STORAGE_ABSOLUTE_PATH": {
"description": "Path where files are stored",
"verbose_name": "Storage path",
"native_type": str,
"sensitive": False,
"default": ".",
},
}
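For reference, a short sketch of the header format CONTENT_RANGE_HEADER_PATTERN now accepts (the trailing /total component was dropped from the pattern above):

import re

CONTENT_RANGE_HEADER_PATTERN = re.compile(r"^bytes (?P<start>\d+)-(?P<end>\d+)$")

match = CONTENT_RANGE_HEADER_PATTERN.match("bytes 0-2097152")  # a 2 MB chunk
assert match and match.group("start") == "0" and match.group("end") == "2097152"
# the old "bytes start-end/total" form no longer matches
assert CONTENT_RANGE_HEADER_PATTERN.match("bytes 0-2097152/31457280") is None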

View File

@ -1,6 +1,5 @@
from django.urls import path
from common import views_api
from django.urls import path
urlpatterns = [
path("api/configs/", views_api.configs, name="api-config"),

View File

@ -15,25 +15,27 @@ class Config(NamedTuple):
value: Any
source: str
def normalize_string(string, form="NFKC"):
return normalize(form, string)
def cast_to_native_type(key, value, native_type):
if native_type == list:
value = value.split(",")
if native_type == bool:
if value == "false":
if value.lower() == "false":
return False
return True
try:
return native_type(value)
except ValueError as e:
message = (
f"Received unexpected value type for configuration key {key}\nValue: {value}\nExpected type : {native_type}"
)
message = f"Received unexpected value type for configuration key {key}\n\
Value: {value}\n\
Expected type : {native_type}"
raise ValueError(message) from e
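A quick illustration of the casting rules above, with values as they might arrive from the environment (SOME_LIST_KEY is hypothetical; note that for bool anything other than a case-insensitive "false" is treated as True):

cast_to_native_type("MAX_CHUNK_BYTES", "2097152", int)   # -> 2097152
cast_to_native_type("VERIFY_ENABLE", "False", bool)      # -> False
cast_to_native_type("VERIFY_ENABLE", "0", bool)          # -> True ("0" is not "false")
cast_to_native_type("SOME_LIST_KEY", "a,b,c", list)      # -> ["a", "b", "c"] (hypothetical key)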

View File

@ -1,15 +1,15 @@
from common.constants import CONFIG_KEYS
from common.serializers import ConfigSerializer
from common.utils import get_config
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from common.constants import CONFIG_KEYS
from common.serializers import ConfigSerializer
from common.utils import get_config
def get_all_configs():
return [get_config(key, value_only=False)._asdict() for key in CONFIG_KEYS]
@api_view(["GET"])
def configs(request, key=None):
if key:

View File

@ -11,6 +11,6 @@ import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lockbox.settings')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lockbox.settings")
application = get_asgi_application()

View File

@ -1,11 +1,12 @@
"""Lockbox File Sharing"""
from pathlib import Path
from common.utils import get_config
from dotenv import load_dotenv
from lockbox.setup import validate_paths
load_dotenv()
# Build paths inside the project like this: BASE_DIR / 'subdir'.
@ -53,48 +54,46 @@ MIDDLEWARE = [
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = 'lockbox.urls'
ROOT_URLCONF = "lockbox.urls"
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / "templates"],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [BASE_DIR / "templates"],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = 'lockbox.wsgi.application'
WSGI_APPLICATION = "lockbox.wsgi.application"
# Password validation
# https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_TZ = True
@ -105,17 +104,35 @@ STATICFILES_DIRS = [
BASE_DIR / "static",
]
STATIC_ROOT = BASE_DIR / "staticfiles"
STATIC_URL = 'static/'
STATIC_URL = "static/"
STORAGES = {
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"staticfiles": {
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
},
}
# Storage
MEDIA_ROOT = BASE_DIR / "media"
MEDIA_ROOT = Path(get_config("STORAGE_ABSOLUTE_PATH"))
MEDIA_URL = "files/"
INCOMPLETE_EXT = ".incomplete"
DEFAULT_FILE_HEADER_BYTES = 2048
validate_paths(MEDIA_ROOT)
# Default primary key field type
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'user.LockboxUser'
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
AUTH_USER_MODEL = "user.LockboxUser"
REST_FRAMEWORK_RENDER_CLASSES = ["rest_framework.renderers.JSONRenderer",]
if get_config("ENABLE_BROWSABLE_API"):
REST_FRAMEWORK_RENDER_CLASSES.append("rest_framework.renderers.BrowsableAPIRenderer")
REST_FRAMEWORK = {
"DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination",
"PAGE_SIZE": 25,
"DEFAULT_RENDERER_CLASSES": tuple(REST_FRAMEWORK_RENDER_CLASSES),
}

16
lockbox/lockbox/setup.py Normal file
View File

@ -0,0 +1,16 @@
from pathlib import Path
def validate_paths(media_path):
"""Validates media path and validates that we can actually write to that location
Args:
media_path (str): Absolute-path-like string where files are stored
Raises:
e: Any exception that might happen (Permission Denied, Path does not exist, etc.)
"""
try:
Path(media_path).mkdir(exist_ok=True)
except Exception as e:
raise e

View File

@ -12,6 +12,6 @@ urlpatterns = [
if get_config("ENABLE_BROWSABLE_API"):
urlpatterns.extend(path('api-auth/', include('rest_framework.urls')))
urlpatterns.append(path("api-auth/", include("rest_framework.urls")))
urlpatterns.extend(static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))

View File

@ -11,6 +11,6 @@ import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lockbox.settings')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lockbox.settings")
application = get_wsgi_application()

View File

@ -1,64 +1,111 @@
const fileInput = document.getElementById('file-upload');
const upload_ready = false;
const fileInput = document.getElementById("file-upload");
const uploadButton = document.getElementById("upload-button");
const fileSizeReport = document.getElementById("file-size");
const progressBar = document.getElementById("progressBar");
let isReady = false;
fileInput.value = '';
fileInput.addEventListener('change', handleFileChange);
uploadButton.addEventListener('click', handleFileUpload);
function handleFileChange(event) {
const file = event.target.files[0];
const file_size = file.size;
console.log("Max file bytes is : ", max_file_bytes);
console.log("File size is: ", file_size);
fileSizeReport.textContent = "File size is: " + file.size + " bytes";
if (file_size > max_file_bytes){
console.log("PLACEHOLDER: Size too big man.");
console.log("File size is too large");
isReady = false;
return
}
console.log("Ready!");
isReady = true;
}
async function handleFileUpload(event) {
if (!isReady){
console.log("Not ready");
return
}
console.log("PLACEHOLDER: Ready!");
isReady = false;
const file = fileInput.files[0];
let headers = new Headers();
headers.append("Content-Type", "application/json");
const request_args = {
method: "POST",
headers: headers,
body: JSON.stringify(
{
"filename": file.name,
"expected_size": file.size,
"sha256": await getHash(file),
}
)
};
const response = await fetch(uploadPath, request_args);
if (!response.ok) {
throw new Error(`Response status: ${response.status}`);
}
function handleFileUpload(event) {
const file = event.target.files[0];
let start = 0;
let end = 0;
let chunk;
while (start < file.size) {
chunk = file.slice(start, start + chunk_size);
end = chunk.size - start;
console.log("LID: ", file_id);
file_id = uploadChunk(chunk, start, end, file.size, file_id);
start += chunk_size;
}
const apifile = await response.json();
await uploadChunks(apifile);
}
function uploadChunk(chunk, start, end, total, file_id=null) {
const formData = new FormData();
const range_header = `bytes ${start}-${end}/${total}`;
formData.append('file', chunk);
if (file_id) {
formData.append("lid", file_id);
function updateProgressBar(remaining, total) {
let current_percent = Math.round((total - remaining) / (total / 100));
progressBar.textContent = current_percent + " %";
}
let request = new Request(".", {
method: 'POST',
body: formData,
headers: {
'X-CSRFToken': csrftoken,
'Content-range': range_header
}
})
return _uploadChunk(request);
async function uploadChunks(remoteFile){
const chunkPath = chunkPathTemplate.replace("@", remoteFile.lid);
let file = fileInput.files[0];
let bytes_remaining = remoteFile.expected_size;
let last_transfer_position = remoteFile.last_end_bytes; // Start where we left off, default is 0;
let to_transfer = remoteFile.max_size_chunk_bytes;
console.log("Chunk size is: " + remoteFile.max_size_chunk_bytes);
while (bytes_remaining >= 0) {
if (bytes_remaining <= remoteFile.max_size_chunk_bytes) {
to_transfer = bytes_remaining;
bytes_remaining = 0;
}
async function _uploadChunk(request) {
const _response = await fetch(request)
.then(async (response)=>response.json())
.then((data) =>{
return data.lid;
})
return _response;
await uploadChunk(file, [last_transfer_position, last_transfer_position += to_transfer], chunkPath);
bytes_remaining -= to_transfer;
updateProgressBar(bytes_remaining, remoteFile.expected_size);
}
console.log("Done!");
progressBar.textContent = "Done!";
}
async function uploadChunk(file, byte_range, chunkPath) {
let file_bytes_target = file.slice(byte_range[0], byte_range[1]);
let body = new FormData();
body.append("Content", file_bytes_target);
let headers = new Headers();
headers.append("Content-Disposition", 'attachment; filename="DUMMY"');
headers.append("Content-Range", "bytes " + byte_range[0] + "-" + byte_range[1])
const request_args = {
method: "PUT",
headers: headers,
body: body
};
const response = await fetch(chunkPath, request_args);
if (!response.ok) {
throw new Error(`Response status: ${response.status}`);
}
}

View File

@ -13,3 +13,31 @@ function getCookie(name) {
}
return cookieValue;
}
function arrayBufferToWordArray(ab) {
var i8a = new Uint8Array(ab);
var a = [];
for (var i = 0; i < i8a.length; i += 4) {
a.push(i8a[i] << 24 | i8a[i + 1] << 16 | i8a[i + 2] << 8 | i8a[i + 3]);
}
return CryptoJS.lib.WordArray.create(a, i8a.length);
}
async function getHash(file) {
// I hate this language so much.
const read = (blob) => new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (event) => resolve(event.target.result);
reader.onerror = reject;
reader.readAsArrayBuffer(blob);
});
const file_bytes = await read(file);
hash = CryptoJS.SHA256(
arrayBufferToWordArray(
file_bytes
)
);
return hash.toString(CryptoJS.enc.Hex);
}

View File

@ -1,9 +1,9 @@
from django.contrib import admin
from storage.models import File
class FileAdmin(admin.ModelAdmin):
readonly_fields = File.readonly_fields
admin.site.register(File, FileAdmin)

View File

@ -2,5 +2,5 @@ from django.apps import AppConfig
class StorageConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'storage'
default_auto_field = "django.db.models.BigAutoField"
name = "storage"

View File

@ -1,12 +0,0 @@
from django import forms
from storage.models import File
class FileForm(forms.ModelForm):
set_name = forms.BooleanField()
class Meta:
model = File
exclude = File.readonly_fields

View File

@ -1,4 +1,4 @@
# Generated by Django 4.2.10 on 2024-02-16 08:15
# Generated by Django 4.2.15 on 2024-09-19 09:40
import common.utils
from django.conf import settings
@ -23,40 +23,23 @@ class Migration(migrations.Migration):
('lid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, verbose_name='lockbox ID')),
('date_created', models.DateTimeField(blank=True, help_text='date at which this object was created', verbose_name='date created')),
('date_updated', models.DateTimeField(blank=True, help_text='date at which this object was last updated', verbose_name='date updated')),
('filename', models.CharField(help_text='display name of this file', max_length=255, verbose_name='name')),
('extension', models.CharField(blank=True, help_text='reported filesystem extension (not mime type)', max_length=128, null=True, verbose_name='extension')),
('file', models.FileField(blank=True, help_text='actual file', null=True, upload_to='', verbose_name='file')),
('status', models.CharField(choices=[('uploading', 'uploading'), ('completed', 'completed'), ('processing', 'processing'), ('abandoned', 'abandoned')], default='uploading', help_text='upload status for file', max_length=10, verbose_name='status')),
('date_completed', models.DateTimeField(blank=True, help_text="datetime at which this file's upload was completed", null=True, verbose_name='completed on')),
('mime_type', models.CharField(blank=True, help_text='reported mime-type', max_length=128, null=True, verbose_name='mime-type')),
('file', models.FileField(blank=True, help_text='actual file', null=True, upload_to=storage.models._upload_to_fielpath, verbose_name='file')),
('filename', models.CharField(help_text='file name', max_length=256, verbose_name='filename')),
('status', models.CharField(choices=[('uploading', 'uploading'), ('completed', 'completed'), ('abandoned', 'abandoned'), ('error', 'error')], default='uploading', help_text='upload status for file', max_length=10, verbose_name='status')),
('datetime_completed', models.DateTimeField(blank=True, help_text="datetime at which this file's upload was completed", null=True, verbose_name='completed on')),
('expires', models.BooleanField(default=False, help_text="will be scrubbed on 'date_expires'", verbose_name='expires')),
('sha256', models.CharField(help_text='file hash (sha256)', max_length=64, verbose_name='hash (sha256)')),
('delete_on_expiration', models.BooleanField(default=False, help_text='will be deleted if expired and expires is true', verbose_name='delete on expiration')),
('size_on_disk', models.PositiveBigIntegerField(blank=True, help_text='total size on disk for this file', null=True, verbose_name='size on disk (bytes)')),
('size', models.PositiveBigIntegerField(blank=True, help_text='total size on disk for this file', null=True, verbose_name='size (bytes)')),
('expected_size', models.PositiveBigIntegerField(help_text='expected file size', verbose_name='expected size (bytes)')),
('max_size_chunk_bytes', models.PositiveBigIntegerField(default=common.utils.get_max_size_chunk_bytes, help_text='max size of each individual chunk for this file', verbose_name='maximum size of chunks (bytes)')),
('owner', models.ForeignKey(blank=True, help_text='owner of this file', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='files_owned', to=settings.AUTH_USER_MODEL, verbose_name='owner')),
('last_end_bytes', models.BigIntegerField(default=0, help_text='last uploaded bytes position', verbose_name='last end bytes')),
('owner', models.ForeignKey(blank=True, help_text='Who owns this file', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='files_owned', to=settings.AUTH_USER_MODEL, verbose_name='owner')),
],
options={
'verbose_name': 'file',
'verbose_name_plural': 'files',
},
),
migrations.CreateModel(
name='FileChunk',
fields=[
('lid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, verbose_name='lockbox ID')),
('date_created', models.DateTimeField(blank=True, help_text='date at which this object was created', verbose_name='date created')),
('date_updated', models.DateTimeField(blank=True, help_text='date at which this object was last updated', verbose_name='date updated')),
('chunk', models.FileField(help_text='actual file', upload_to=storage.models.get_upload_path_chunk, verbose_name='file')),
('chunk_id', models.BigIntegerField(help_text='part of chunk', verbose_name='chunk id')),
('size', models.BigIntegerField(help_text='size for this chunk', verbose_name='size')),
('start', models.BigIntegerField(help_text='start for this chunk', verbose_name='start')),
('end', models.BigIntegerField(help_text='end for this chunk', verbose_name='end')),
('file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='chunks', to='storage.file')),
('owner', models.ForeignKey(blank=True, help_text='owner of this file chunk', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='chunks_owned', to=settings.AUTH_USER_MODEL, verbose_name='owner')),
],
options={
'verbose_name': 'file chunk',
'verbose_name_plural': 'file chunks',
'unique_together': {('file', 'chunk_id')},
},
),
]

View File

@ -1,40 +1,55 @@
from datetime import timedelta
from hashlib import sha256
from pathlib import Path
from common.constants import UPLOAD_STATUS_TYPES
import magic
from common.constants import UPLOAD_ERROR_CODES, UPLOAD_STATUS_TYPES
from common.models import LockboxBase
from common.utils import get_config, get_max_size_chunk_bytes
from django.conf import settings
from django.core.files.uploadedfile import UploadedFile
from django.db import models
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
BROKEN_STATUSES = [
UPLOAD_STATUS_TYPES.ABANDONED,
UPLOAD_STATUS_TYPES.ERROR,
]
def get_upload_path_chunk(instance, filename):
file_subdir = settings.MEDIA_ROOT / str(instance.file.lid)
if not Path.exists(file_subdir):
Path.mkdir(file_subdir)
class UploadError(Exception):
def __init__(self, *args, **kwargs):
self.code = kwargs.pop("code")
super().__init__(*args, **kwargs)
def _upload_to_fielpath(instance, filename):
return Path(str(instance.lid)).joinpath(f"{instance.filename}{settings.INCOMPLETE_EXT}")
class FileQuerySet(models.QuerySet):
'''Regular bulk delete method, but it invokes obj.delete to actually clear the file.
'''
def delete(self, *args, **kwargs):
keep_file = kwargs.pop("keep_file", False)
for obj in self:
obj.delete(keep_file=keep_file)
return super().delete(*args, **kwargs)
filename = f"{FileChunk.last_chunk_id(instance.file)}.chunk"
return Path(str(instance.lid)) / Path(filename)
class File(LockboxBase):
filename = models.CharField(
max_length=255,
null=False,
blank=False,
verbose_name = _("name"),
help_text=_("Name of the file"),
)
extension = models.CharField(
objects = FileQuerySet.as_manager()
mime_type = models.CharField(
max_length=128,
blank=True,
null=True,
verbose_name=_("extension"),
help_text=_("reported filesystem extension (not mime type)"),
verbose_name=_("mime-type"),
help_text=_("reported mime-type"),
)
file = models.FileField(
@ -42,13 +57,22 @@ class File(LockboxBase):
blank=True,
verbose_name=_("file"),
help_text=_("actual file"),
upload_to=_upload_to_fielpath,
)
filename = models.CharField(
null=False,
blank=False,
max_length=256, # safeish in most FS
verbose_name=_("filename"),
help_text=_("file name")
)
UPLOAD_CHOICES = (
(UPLOAD_STATUS_TYPES.UPLOADING, _(UPLOAD_STATUS_TYPES.UPLOADING)),
(UPLOAD_STATUS_TYPES.COMPLETED, _(UPLOAD_STATUS_TYPES.COMPLETED)),
(UPLOAD_STATUS_TYPES.PROCESSING, _(UPLOAD_STATUS_TYPES.PROCESSING)),
(UPLOAD_STATUS_TYPES.ABANDONED, _(UPLOAD_STATUS_TYPES.ABANDONED)),
(UPLOAD_STATUS_TYPES.ERROR, _(UPLOAD_STATUS_TYPES.ERROR)),
)
status = models.CharField(
@ -61,7 +85,7 @@ class File(LockboxBase):
help_text=_("upload status for file"),
)
date_completed = models.DateTimeField(
datetime_completed = models.DateTimeField(
null=True,
blank=True,
verbose_name=_("completed on"),
@ -86,6 +110,14 @@ class File(LockboxBase):
help_text=_("will be scrubbed on 'date_expires'"),
)
sha256 = models.CharField(
null=False,
blank=False,
max_length=64,
verbose_name=_("hash (sha256)"),
help_text=_("file hash (sha256)")
)
delete_on_expiration = models.BooleanField(
null=False,
blank=False,
@ -94,13 +126,20 @@ class File(LockboxBase):
help_text=_("will be deleted if expired and expires is true"),
)
size_on_disk = models.PositiveBigIntegerField(
size = models.PositiveBigIntegerField(
null=True,
blank=True,
verbose_name=_("size on disk (bytes)"),
verbose_name=_("size (bytes)"),
help_text=_("total size on disk for this file"),
)
expected_size = models.PositiveBigIntegerField(
null=False,
blank=False,
verbose_name=_("expected size (bytes)"),
help_text=_("expected file size"),
)
max_size_chunk_bytes = models.PositiveBigIntegerField(
null=False,
blank=False,
@ -109,168 +148,190 @@ class File(LockboxBase):
help_text=_("max size of each individual chunk for this file"),
)
last_end_bytes = models.BigIntegerField(
null=False,
blank=False,
default=0,
verbose_name=("last end bytes"),
help_text=_("last uploaded bytes position"),
)
readonly_fields = [
"extension",
"mime_type",
"status",
"date_completed",
"size_on_disk",
"datetime_completed",
"size",
"file",
"max_size_chunk_bytes",
"last_end_bytes",
*LockboxBase.readonly_fields,
]
def __str__(self):
return self.filename
name = "NO NAME"
if self.file:
name = self.file.name
return f"{name} ({self.lid})"
class Meta:
verbose_name = _("file")
verbose_name_plural = _("files")
@property
def checksum(self):
return 0
@property
def date_expires(self):
return self.date_created + timedelta(minutes=get_config("EXPIRATION_DELTA_MINUTES"))
@property
def abandoned(self):
return self.date_created + timedelta(minutes=get_config("ABANDONED_DELTA_MINUTES"))
return self.date_created <= timezone.now() + timedelta(minutes=get_config("ABANDONED_DELTA_MINUTES"))
@classmethod
def abandoned_condition():
return models.Q(
date_created__lte=timezone.now() + timedelta(minutes=get_config("ABANDONED_DELTA_MINUTES"))
)
@property
def expired(self):
return self.date_expires <= timezone.now()
def delete(self, *args, delete_file=True, **kwargs):
if self.file:
storage, path = self.file.storage, self.file.path
super().delete(*args, **kwargs)
if self.file and delete_file:
storage.delete(path)
@classmethod
def expired_conditon():
return models.Q(date_expires__lte=timezone.now())
# clean up chunks in case they have not been cleaned up by task.
self.chunks.all().delete()
@property
def exists(self):
if not self.file:
return False
return Path(self.file.path).is_file()
def get_file_handler_bytes(self):
@classmethod
def cleanup(self, dry_run=True, skip=None):
# Probably skip some actual files (or record files) first make dry run report
# Then skip=[file_1, file_2]
# Should cleanup be automatic? probably not
# Find what's in a broken status
# Find what has a record but does not exist
# Find what does have a record but doesn't exist.
# Cleanup any directories that are not from a record (UUID and no uuid.)
# Cleanup any other files?
pass
# broken = File.objects.filter(status__in=BROKEN_STATUSES)
# strays = Path()
@classmethod
def reconcile(self, dry_run=True, delete_strays=True):
# finds stuff that has no record and creates one, moves it to the right place.
# Probably good to call another OS location to copy files to.
# would be cool if it hard linked
pass
def append_chunk(self, chunk_file, chunk_data):
"""Append chunks to a file
Args:
chunk_file (UploadedFile): Django provided abstraction that contains the actual file in memory
chunk_data (Dict): Additional chunk parameters: start_bytes, end_bytes, size
Raises:
UploadError: The file you are trying to append to is not found in FS
UploadError: Expected last_end_bytes + 1 but got a different number
"""
# Override in case recently abandoned
# Will persist if it does not error out.
self.status = UPLOAD_STATUS_TYPES.UPLOADING
# Do not rely on DB file state, check for actual file.
if not self.exists:
# Oh oh, we are uploading an n + 1 chunk but there's no file
if chunk_data["start_bytes"] != 0:
self.status = UPLOAD_STATUS_TYPES.ERROR
self.file.storage.delete()
self.save()
raise UploadError(
"File for uploaded chunk no longer exists",
code=UPLOAD_ERROR_CODES.FILE_MISSING,
)
if self.last_end_bytes and self.last_end_bytes != chunk_data["start_bytes"]:
# Client screwed up, this is not where we left off
raise UploadError(
"Mismatch in expected chunk",
code=UPLOAD_ERROR_CODES.CHUNK_MISMATCH
)
self.last_end_bytes = chunk_data["end_bytes"]
if chunk_data["start_bytes"] == 0:
self.file = chunk_file
self.save()
else:
chunk_file.open("rb")
self.file.open("ab")
self.file.write(chunk_file.read())
self.file.close()
self.file.open(mode="rb")
return UploadedFile(file=self.file, name=self.filename, size=self.offset)
self.save()
if self.expected_size == self.last_end_bytes:
self.save()
self.finalize()
return
class FileChunk(LockboxBase):
file = models.ForeignKey(
"storage.File",
null=False,
blank=False,
on_delete=models.CASCADE,
related_name="chunks",
def finalize(self):
"""Finalizes the file
Guesses mimetype
Validates file hash if enabled
Renames file to the originally provided filename, whatever it is.
Sets file status to 'completed'
Sets datetime_completed to now
"""
self.refresh_from_db()
self.mime_type = self.guess_type()
if get_config("VERIFY_ENABLE"):
result = self.verify()
if not result:
self.status = UPLOAD_STATUS_TYPES.ERROR
self.file.storage.delete() # tentative
raise UploadError(
"File verification failed",
code=UPLOAD_ERROR_CODES.VERIFICATION_FAILED
)
chunk = models.FileField(
upload_to=get_upload_path_chunk,
null=False,
blank=False,
verbose_name=_("file"),
help_text=_("actual file"),
)
final_path = settings.MEDIA_ROOT / str(self.lid) / self.filename
chunk_id = models.BigIntegerField(
null=False,
blank=False,
verbose_name=_("chunk id"),
help_text=_("part of chunk"),
)
with transaction.atomic():
Path(self.file.path).rename(final_path)
self.file.name = str(final_path)
self.status = UPLOAD_STATUS_TYPES.COMPLETED
self.datetime_completed = timezone.now()
self.save()
size = models.BigIntegerField(
null=False,
blank=False,
verbose_name=("size"),
help_text=_("size for this chunk"),
)
def verify(self):
if self.exists:
self.file.open("rb")
sha256_hash = sha256(self.file.read()).hexdigest()
self.file.close()
return sha256_hash == self.sha256
raise Exception(f"Fatal: Could get file hash - file {self.file.path} does not exist")
start = models.BigIntegerField(
null=False,
blank=False,
verbose_name=("start"),
help_text=_("start for this chunk"),
)
end = models.BigIntegerField(
null=False,
blank=False,
verbose_name=("end"),
help_text=_("end for this chunk"),
)
owner = models.ForeignKey(
"user.LockboxUser",
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name="chunks_owned",
verbose_name=_("owner"),
help_text=_("owner of this file chunk"),
)
readonly_fields = [
"file",
"chunk_id",
"start",
"end",
"size",
*LockboxBase.readonly_fields,
]
def __str__(self):
return f"{self.file.filename}.{self.chunk_id}.chunk"
class Meta:
verbose_name = _("file chunk")
verbose_name_plural = _("file chunks")
unique_together = ("file", "chunk_id")
def guess_type(self):
self.file.open("rb")
self.file.seek(0)
mime_type = magic.from_buffer(self.file.read(settings.DEFAULT_FILE_HEADER_BYTES), mime=True)
self.file.close()
return mime_type
def save(self, *args, **kwargs):
# nasty hack lol
self.chunk_id = int(Path(self.file.name).stem)
if not self.max_size_chunk_bytes:
self.max_size_chunk_bytes = get_max_size_chunk_bytes()
if self.expected_size > get_config("MAX_FILE_BYTES"):
raise ValidationError(f"Expected size: {self.expected_size} > than config MAX_SIZE_BYTES")
return super().save(*args, **kwargs)
def delete(self, *args, delete_file=True, **kwargs):
if self.chunk:
storage, path = self.chunk.storage, self.chunk.path
super().delete(*args, **kwargs)
if self.chunk and delete_file:
storage.delete(path)
@staticmethod
def last_chunk_id(file_lid):
last_chunk = (
FileChunk.objects.filter(
file__lid=file_lid,
)
.order_by("-chunk_id")
.values("chunk_id")
.first()
.get("chunk_id")
)
if last_chunk:
return last_chunk + 1
return 1
# class FileShare(LockboxBase):
# file = models.ForeignKey(
# "storage.File",
# null=False,
# blank=False,
# on_delete=models.CASCADE,
# related_name="shares",
# )
# def __str__(self):
# return self.file.name
# class Meta:
# verbose_name = _("share")
# verbose_name_plural = _("shares")
def delete(self, *args, **kwargs):
keep_file = kwargs.pop("keep_file", False)
with transaction.atomic():
if not keep_file:
if self.file and self.exists:
self.file.storage.delete(self.file.path)
self.file.storage.delete(Path(self.file.path).parent)
result = super().delete(*args, **kwargs)
return result
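The keep_file flag added above threads through both the queryset delete and the instance delete, which is what the "Made bulk delete also delete files" commit relies on. A small usage sketch (the status filter is only illustrative):

# Delete records and their on-disk files (default behaviour):
File.objects.filter(status=UPLOAD_STATUS_TYPES.ERROR).delete()

# Delete only the database records, leaving the stored files in place:
File.objects.filter(status=UPLOAD_STATUS_TYPES.ERROR).delete(keep_file=True)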

View File

@ -1,6 +1,5 @@
from rest_framework import serializers
from storage.models import File, FileChunk
from storage.models import File
class FileSerializer(serializers.ModelSerializer):
@ -9,21 +8,3 @@ class FileSerializer(serializers.ModelSerializer):
model = File
fields = "__all__"
read_only_fields = File.readonly_fields
class FileChunkSerializer(serializers.ModelSerializer):
class Meta:
model = FileChunk
fields = "__all__"
read_only_fields = FileChunk.readonly_fields
def validate(self, data):
data = super().validate(data)
file = File.objects.get(lid=data["file"])
if data["size"] > file.max_size_chunk_bytes:
detail = f"'size' param is larger than max chunk size for file:\
{data["size"]} > {file.max_size_chunk_bytes}"
raise serializers.ValidationError(detail)
return data

View File

@ -1,17 +1,11 @@
from django.urls import include, path
from rest_framework.routers import SimpleRouter
from rest_framework_nested.routers import NestedSimpleRouter
from storage import views_api, views_client
router = SimpleRouter()
router.register(r'files', views_api.FileModelViewSet)
chunk_router = NestedSimpleRouter(router, r'files', lookup="file")
chunk_router.register(r'chunks', views_api.FileChunkViewSet, basename="file-chunks")
router.register(r"files", views_api.FileModelViewSet)
urlpatterns = [
path("api/", include(router.urls)),
path("api/", include(chunk_router.urls)),
path("upload/", views_client.FileUploadView.as_view(), name="client-fileupload"),
]

View File

@ -1,16 +1,17 @@
from common.constants import (
CONTENT_RANGE_HEADER,
CONTENT_RANGE_HEADER_PATTERN,
UPLOAD_STATUS_TYPES,
)
# from common.utils import get_config
from django.core.exceptions import ValidationError
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.exceptions import ValidationError as UserValidationError
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
# from user.models import LockboxUser
from storage.models import File, FileChunk
from storage.serializers import FileChunkSerializer, FileSerializer
from storage.models import File, UploadError
from storage.serializers import FileSerializer
class FileModelViewSet(ModelViewSet):
@ -18,16 +19,61 @@ class FileModelViewSet(ModelViewSet):
queryset = File.objects.all()
serializer_class = FileSerializer
@action(detail=True, methods=["post"])
def finalize(self, *args, **kwargs):
file = self.get_object()
file.status = UPLOAD_STATUS_TYPES.PROCESSING
file.save()
return Response(status=status.HTTP_200_OK)
@action(detail=True, methods=["PUT"])
def append_chunk(self, request, filename="DUMMY", format=None, pk=None):
try:
file = File.objects.filter(lid=pk).first()
except ValidationError:
raise UserValidationError(f"UUID {pk} is not a valid UUID")
class FileChunkViewSet(ModelViewSet):
model = FileChunk
queryset = FileChunk.objects.all()
serializer_class = FileChunkSerializer
if not file:
raise NotFound(f"File with ID {pk} was not found")
if file.status in [UPLOAD_STATUS_TYPES.COMPLETED, UPLOAD_STATUS_TYPES.ERROR]:
raise UserValidationError(
f"Cannot append chunk to file, status is: {file.status}"
)
chunk_data = self._get_content_range(request)
if not chunk_data:
raise UserValidationError("Missing content-range headers")
chunk_file = request.FILES.get("Content", None)
if not chunk_file:
raise UserValidationError(
"Could not find 'Content' in request body"
)
# Bytes are inclusive for slicing but not for size, go figure.
if chunk_file.size > file.max_size_chunk_bytes:
raise UserValidationError(
f"Chunk size is greater than files max chunk size: {chunk_file.size} > {file.max_size_chunk_bytes}"
)
range_size = chunk_data["end_bytes"] - chunk_data["start_bytes"]
if chunk_file.size != range_size:
raise UserValidationError(
f"Actual chunk size mismatches content-range header: {chunk_file.size} != {range_size}"
)
chunk_data["size"] = chunk_file.size
try:
file.append_chunk(chunk_file, chunk_data)
except UploadError as e:
return Response({"code": e.code}, status=status.HTTP_400_BAD_REQUEST)
return Response(status=status.HTTP_201_CREATED)
def _get_content_range(self, request):
content_range = request.META.get(CONTENT_RANGE_HEADER, None)
if not content_range:
return None
match = CONTENT_RANGE_HEADER_PATTERN.match(content_range)
if not match:
return None
return {
"start_bytes": int(match.group("start")),
"end_bytes": int(match.group("end")),
}
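Putting the pieces together, a minimal client sketch of this upload flow using the requests library; the host and the "/api/" prefix are assumptions, while the "Content" field name, the "bytes start-end" Content-Range format, and the append_chunk action come from the code above:

import hashlib
from pathlib import Path

import requests

BASE = "http://localhost:8000/api"  # assumed deployment URL and prefix
path = Path("example.bin")          # any local file
data = path.read_bytes()

# 1. Register the file (FileModelViewSet create).
meta = requests.post(
    f"{BASE}/files/",
    json={
        "filename": path.name,
        "expected_size": len(data),
        "sha256": hashlib.sha256(data).hexdigest(),
    },
).json()
lid = meta["lid"]
chunk_size = meta["max_size_chunk_bytes"]

# 2. PUT each chunk with a "bytes start-end" Content-Range header.
start = 0
while start < len(data):
    end = min(start + chunk_size, len(data))
    resp = requests.put(
        f"{BASE}/files/{lid}/append_chunk/",
        files={"Content": ("chunk", data[start:end])},
        headers={"Content-Range": f"bytes {start}-{end}"},
    )
    resp.raise_for_status()
    start = end
# finalize() runs server-side once last_end_bytes reaches expected_size.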

View File

@ -2,14 +2,11 @@ from common.utils import get_config
from django.shortcuts import render
from django.views import View
from storage.forms import FileForm
# Static view
class FileUploadView(View):
def get(self, request):
context = {
"form": FileForm,
"max_chunk_bytes": get_config("MAX_CHUNK_BYTES"),
"max_file_bytes": get_config("MAX_FILE_BYTES"),
}
return render(request, "storage/upload.html", context=context)

View File

@ -10,6 +10,7 @@
{% block prejs %}
{% endblock %}
<script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/4.0.0/crypto-js.min.js"></script>
<title>{% block title %}Lockbox{% endblock %}</title>
</head>

View File

@ -1,37 +1,37 @@
{% extends "base.html" %}
{% block title %}Upload a file{% endblock %}
{% block postjs %}
{% load static %}
{{ block.super }}
<script src="{% static 'js/utils.js' %}"></script>
<script>
const chunk_size = {{ max_chunk_bytes }};
const max_file_bytes = {{ max_file_bytes }};
const uploadPath = "{% url 'file-list' %}";
const chunkPathTemplate = "{% url 'file-append-chunk' pk='@'%}";
const returnPath = "{% url 'file-append-chunk' pk='@'%}"
</script>
<script src="{% static 'js/chunked_uploader.js' %}"></script>
{% endblock %}
{% block content %}
<p> Upload file </p>
<table>
{{form}}
</table>
<p id="max_size">Max size allowed: {{max_file_bytes}} bytes</p>
<p id="file-size"></p>
<form method="post">{% csrf_token %}
<table>
<form>
<input type="file" id="file-upload">
<input type="button" id="upload-button" value="Upload">
</form>
</table>
<p>Progress: </p>
<p id="progressBar"></p>
{% endblock %}

View File

@ -1,9 +1,9 @@
from django.contrib import admin
from user.models import LockboxUser
class LockboxUserAdmin(admin.ModelAdmin):
readonly_fields = LockboxUser.readonly_fields
admin.site.register(LockboxUser, LockboxUserAdmin)

View File

@ -2,5 +2,5 @@ from django.apps import AppConfig
class UserConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'user'
default_auto_field = "django.db.models.BigAutoField"
name = "user"

View File

@ -1,4 +1,4 @@
# Generated by Django 4.2.10 on 2024-02-12 08:39
# Generated by Django 4.2.15 on 2024-09-16 10:45
import django.contrib.auth.validators
from django.db import migrations, models
@ -28,7 +28,7 @@ class Migration(migrations.Migration):
('lid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False, verbose_name='lockbox ID')),
('date_created', models.DateTimeField(blank=True, help_text='date at which this object was created', verbose_name='date created')),
('date_updated', models.DateTimeField(blank=True, help_text='date at which this object was last updated', verbose_name='date updated')),
('alias', models.SlugField(blank=True, help_text='an alias or nickname to remember who this is', max_length=32, null=True, unique=True, verbose_name='name')),
('alias', models.SlugField(blank=True, help_text='an alias or nickname to remember who this is', max_length=32, null=True, unique=True, verbose_name='alias')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')),
],

View File

@ -2,13 +2,12 @@ from common.models import LockboxBase
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.utils.translation import gettext_lazy as _
from user.managers import LockboxUserManager
class LockboxUser(AbstractUser, LockboxBase):
alias = models.SlugField(
verbose_name=_("name"),
verbose_name=_("alias"),
max_length=32,
unique=True,
null=True,

View File

@ -1,5 +1,4 @@
import pytest
from user.models import LockboxUser
@ -8,6 +7,7 @@ class TestUser:
"""
Test user related functions are working correctly.
"""
def test_stub(self):
user = LockboxUser.objects.create(alias="TestUser", username="meow")
loaded_user = LockboxUser.objects.filter(alias="TestUser").first()

323
poetry.lock generated
View File

@ -1,14 +1,14 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "asgiref"
version = "3.7.2"
version = "3.8.1"
description = "ASGI specs, helper code, and adapters"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"},
{file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"},
{file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"},
{file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
]
[package.extras]
@ -27,63 +27,83 @@ files = [
[[package]]
name = "coverage"
version = "7.4.1"
version = "7.6.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"},
{file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"},
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"},
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"},
{file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"},
{file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"},
{file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"},
{file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"},
{file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"},
{file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"},
{file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"},
{file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"},
{file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"},
{file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"},
{file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"},
{file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"},
{file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"},
{file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"},
{file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"},
{file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"},
{file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"},
{file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"},
{file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"},
{file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"},
{file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"},
{file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"},
{file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"},
{file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"},
{file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"},
{file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"},
{file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"},
{file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"},
{file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"},
{file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"},
{file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
{file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
{file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
{file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
{file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
{file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
{file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
{file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
{file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
{file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
{file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
{file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
{file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
{file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
{file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
{file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
{file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
{file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
{file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
{file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
{file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
{file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
{file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
{file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
{file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
{file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
]
[package.extras]
@ -91,13 +111,13 @@ toml = ["tomli"]
[[package]]
name = "django"
version = "4.2.10"
version = "4.2.15"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.8"
files = [
{file = "Django-4.2.10-py3-none-any.whl", hash = "sha256:a2d4c4d4ea0b6f0895acde632071aff6400bfc331228fc978b05452a0ff3e9f1"},
{file = "Django-4.2.10.tar.gz", hash = "sha256:b1260ed381b10a11753c73444408e19869f3241fc45c985cd55a30177c789d13"},
{file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"},
{file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"},
]
[package.dependencies]
@ -111,18 +131,17 @@ bcrypt = ["bcrypt"]
[[package]]
name = "djangorestframework"
version = "3.14.0"
version = "3.15.2"
description = "Web APIs for Django, made easy."
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
files = [
{file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"},
{file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"},
{file = "djangorestframework-3.15.2-py3-none-any.whl", hash = "sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20"},
{file = "djangorestframework-3.15.2.tar.gz", hash = "sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad"},
]
[package.dependencies]
django = ">=3.0"
pytz = "*"
django = ">=4.2"
[[package]]
name = "drf-nested-routers"
@ -139,6 +158,38 @@ files = [
Django = ">=3.2"
djangorestframework = ">=3.14.0"
[[package]]
name = "flake8"
version = "7.1.1"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"},
{file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.12.0,<2.13.0"
pyflakes = ">=3.2.0,<3.3.0"
[[package]]
name = "flake8-pyproject"
version = "1.2.3"
description = "Flake8 plug-in loading the configuration from pyproject.toml"
optional = false
python-versions = ">= 3.6"
files = [
{file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"},
]
[package.dependencies]
Flake8 = ">=5"
[package.extras]
dev = ["pyTest", "pyTest-cov"]
[[package]]
name = "iniconfig"
version = "2.0.0"
@ -150,51 +201,98 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "isort"
version = "5.13.2"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
files = [
{file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
{file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
]
[package.extras]
colors = ["colorama (>=0.4.6)"]
[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
optional = false
python-versions = ">=3.6"
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "packaging"
version = "23.2"
version = "24.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
name = "pluggy"
version = "1.4.0"
version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
{file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pycodestyle"
version = "2.12.1"
description = "Python style guide checker"
optional = false
python-versions = ">=3.8"
files = [
{file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"},
{file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"},
]
[[package]]
name = "pyflakes"
version = "3.2.0"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
]
[[package]]
name = "pytest"
version = "8.0.0"
version = "8.3.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"},
{file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"},
{file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
{file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.3.0,<2.0"
pluggy = ">=1.5,<2"
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
@ -247,84 +345,57 @@ files = [
cli = ["click (>=5.0)"]
[[package]]
name = "pytz"
version = "2024.1"
description = "World timezone definitions, modern and historical"
name = "python-magic"
version = "0.4.27"
description = "File type identification using libmagic"
optional = false
python-versions = "*"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
]
[[package]]
name = "ruff"
version = "0.2.1"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"},
{file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855"},
{file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683"},
{file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad"},
{file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba"},
{file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc"},
{file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02"},
{file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232"},
{file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0"},
{file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6"},
{file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1"},
{file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"},
{file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
]
[[package]]
name = "sqlparse"
version = "0.4.4"
version = "0.5.1"
description = "A non-validating SQL parser."
optional = false
python-versions = ">=3.5"
python-versions = ">=3.8"
files = [
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
{file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"},
{file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"},
]
[package.extras]
dev = ["build", "flake8"]
dev = ["build", "hatch"]
doc = ["sphinx"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "tzdata"
version = "2023.4"
version = "2024.1"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
{file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
]
[[package]]
name = "whitenoise"
version = "6.6.0"
version = "6.7.0"
description = "Radically simplified static file serving for WSGI applications"
optional = false
python-versions = ">=3.8"
files = [
{file = "whitenoise-6.6.0-py3-none-any.whl", hash = "sha256:b1f9db9bf67dc183484d760b99f4080185633136a273a03f6436034a41064146"},
{file = "whitenoise-6.6.0.tar.gz", hash = "sha256:8998f7370973447fac1e8ef6e8ded2c5209a7b1f67c1012866dbcd09681c3251"},
{file = "whitenoise-6.7.0-py3-none-any.whl", hash = "sha256:a1ae85e01fdc9815d12fa33f17765bc132ed2c54fa76daf9e39e879dd93566f6"},
{file = "whitenoise-6.7.0.tar.gz", hash = "sha256:58c7a6cd811e275a6c91af22e96e87da0b1109e9a53bb7464116ef4c963bf636"},
]
[package.extras]
brotli = ["Brotli"]
brotli = ["brotli"]
[metadata]
lock-version = "2.0"
python-versions = "3.12"
content-hash = "e338f5cc37553ef6a4799746f6feb537427330934b43caee4aa73c3b74a0fb9e"
python-versions = "~3.12"
content-hash = "cf73bb83fc48555289dd3949c6bf10a7feab817496ab8f4826222a2f9b2bad0a"


@ -7,18 +7,20 @@ license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "3.12"
python = "~3.12"
django = "~4.2.0"
whitenoise = "^6.6.0"
djangorestframework = "^3.14.0"
drf-nested-routers = "^0.93.5"
python-dotenv = "^1.0.1"
python-magic = "^0.4.27"
[tool.poetry.group.dev.dependencies]
pytest = "^8.0.0"
pytest-django = "^4.8.0"
pytest-cov = "^4.1.0"
ruff = "^0.2.1"
flake8-pyproject = "^1.2.3"
isort = "^5.13.2"
[tool.pytest.ini_options]
@ -50,7 +52,7 @@ omit = [
"lockbox/wsgi.py",
]
[tool.ruff]
[tool.flake8]
exclude = [
"*/migrations/*",
".pyscripts/*",
@ -60,13 +62,6 @@ exclude = [
".venv",
"manage.py",
]
force-exclude = true
line-length = 120
target-version = "py312"
[tool.ruff.lint]
select = ["ALL"]
ignore = [
"ANN",
"ARG001",
@ -78,7 +73,6 @@ ignore = [
"ERA001",
"FIX",
"N801",
"PLR0913",
"Q000",
"RUF012",
"S101",
@ -88,6 +82,8 @@ ignore = [
"TD",
"TRY",
]
max-line-length = 120
[build-system]
requires = ["poetry-core"]


@ -1,9 +1,9 @@
asgiref==3.7.2 ; python_version == "3.12"
django==4.2.10 ; python_version == "3.12"
djangorestframework==3.14.0 ; python_version == "3.12"
drf-nested-routers==0.93.5 ; python_version == "3.12"
python-dotenv==1.0.1 ; python_version == "3.12"
pytz==2024.1 ; python_version == "3.12"
sqlparse==0.4.4 ; python_version == "3.12"
tzdata==2023.4 ; sys_platform == "win32" and python_version == "3.12"
whitenoise==6.6.0 ; python_version == "3.12"
asgiref==3.8.1 ; python_version >= "3.12" and python_version < "3.13"
django==4.2.15 ; python_version >= "3.12" and python_version < "3.13"
djangorestframework==3.15.2 ; python_version >= "3.12" and python_version < "3.13"
drf-nested-routers==0.93.5 ; python_version >= "3.12" and python_version < "3.13"
python-dotenv==1.0.1 ; python_version >= "3.12" and python_version < "3.13"
python-magic==0.4.27 ; python_version >= "3.12" and python_version < "3.13"
sqlparse==0.5.1 ; python_version >= "3.12" and python_version < "3.13"
tzdata==2024.1 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "win32"
whitenoise==6.7.0 ; python_version >= "3.12" and python_version < "3.13"


@ -1,18 +1,23 @@
asgiref==3.7.2 ; python_version == "3.12"
colorama==0.4.6 ; python_version == "3.12" and sys_platform == "win32"
coverage[toml]==7.4.1 ; python_version == "3.12"
django==4.2.10 ; python_version == "3.12"
djangorestframework==3.14.0 ; python_version == "3.12"
drf-nested-routers==0.93.5 ; python_version == "3.12"
iniconfig==2.0.0 ; python_version == "3.12"
packaging==23.2 ; python_version == "3.12"
pluggy==1.4.0 ; python_version == "3.12"
pytest-cov==4.1.0 ; python_version == "3.12"
pytest-django==4.8.0 ; python_version == "3.12"
pytest==8.0.0 ; python_version == "3.12"
python-dotenv==1.0.1 ; python_version == "3.12"
pytz==2024.1 ; python_version == "3.12"
ruff==0.2.1 ; python_version == "3.12"
sqlparse==0.4.4 ; python_version == "3.12"
tzdata==2023.4 ; sys_platform == "win32" and python_version == "3.12"
whitenoise==6.6.0 ; python_version == "3.12"
asgiref==3.8.1 ; python_version >= "3.12" and python_version < "3.13"
colorama==0.4.6 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "win32"
coverage[toml]==7.6.1 ; python_version >= "3.12" and python_version < "3.13"
django==4.2.15 ; python_version >= "3.12" and python_version < "3.13"
djangorestframework==3.15.2 ; python_version >= "3.12" and python_version < "3.13"
drf-nested-routers==0.93.5 ; python_version >= "3.12" and python_version < "3.13"
flake8-pyproject==1.2.3 ; python_version >= "3.12" and python_version < "3.13"
flake8==7.1.1 ; python_version >= "3.12" and python_version < "3.13"
iniconfig==2.0.0 ; python_version >= "3.12" and python_version < "3.13"
isort==5.13.2 ; python_version >= "3.12" and python_version < "3.13"
mccabe==0.7.0 ; python_version >= "3.12" and python_version < "3.13"
packaging==24.1 ; python_version >= "3.12" and python_version < "3.13"
pluggy==1.5.0 ; python_version >= "3.12" and python_version < "3.13"
pycodestyle==2.12.1 ; python_version >= "3.12" and python_version < "3.13"
pyflakes==3.2.0 ; python_version >= "3.12" and python_version < "3.13"
pytest-cov==4.1.0 ; python_version >= "3.12" and python_version < "3.13"
pytest-django==4.8.0 ; python_version >= "3.12" and python_version < "3.13"
pytest==8.3.2 ; python_version >= "3.12" and python_version < "3.13"
python-dotenv==1.0.1 ; python_version >= "3.12" and python_version < "3.13"
python-magic==0.4.27 ; python_version >= "3.12" and python_version < "3.13"
sqlparse==0.5.1 ; python_version >= "3.12" and python_version < "3.13"
tzdata==2024.1 ; python_version >= "3.12" and python_version < "3.13" and sys_platform == "win32"
whitenoise==6.7.0 ; python_version >= "3.12" and python_version < "3.13"


@ -1,2 +1,3 @@
printf "\n\n|| Starting ruff check ||\n\n"
ruff check --config=./pyproject.toml
printf "\n\n|| Starting lint check ||\n\n"
flake8
printf "\n\n|| Finished lint check ||\n\n"


@ -1,3 +1,4 @@
cd lockbox
printf "\n\n|| Starting setup ||\n\n"
python manage.py migrate
printf "\n\n|| Finished setup ||\n\n"


@ -1,3 +1,4 @@
cd lockbox
printf "\n\n|| Starting pytest run ||\n\n"
pytest --cov=. --cov-report term-missing --reuse-db
printf "\n\n|| Finished pytest run ||\n\n"