diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..6acab60
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,7 @@
+[*.sh]
+indent_style = space
+indent_size = 4
+
+[.envrc]
+indent_style = space
+indent_size = 4
diff --git a/.envrc b/.envrc
index 256d758..2066d11 100644
--- a/.envrc
+++ b/.envrc
@@ -5,13 +5,41 @@ layout_postgres() {
     export PGHOST="$PGDATA"
     if [[ ! -d "$PGDATA" ]]; then
         initdb
-        echo -e "listen_addresses = 'localhost'\nunix_socket_directories = '$PGHOST'" >> "$PGDATA/postgresql.conf"
+        echo -e "listen_addresses = 'localhost'\nunix_socket_directories = '$PGHOST'" >>"$PGDATA/postgresql.conf"
         echo "CREATE DATABASE django;" | postgres --single -E postgres
     fi
 }
 layout postgres
+
+layout_poetry() {
+    PYPROJECT_TOML="${PYPROJECT_TOML:-pyproject.toml}"
+    if [[ ! -f "$PYPROJECT_TOML" ]]; then
+        log_status "No pyproject.toml found. Executing \`poetry init\` to create a \`$PYPROJECT_TOML\` first."
+        poetry init
+    fi
+
+    if [[ -d ".venv" ]]; then
+        VIRTUAL_ENV="$(pwd)/.venv"
+    else
+        VIRTUAL_ENV=$(
+            poetry env info --path 2>/dev/null
+            true
+        )
+    fi
+
+    if [[ -z $VIRTUAL_ENV || ! -d $VIRTUAL_ENV ]]; then
+        log_status "No virtual environment exists. Executing \`poetry install\` to create one."
+        poetry install
+        VIRTUAL_ENV=$(poetry env info --path)
+    fi
+
+    PATH_add "$VIRTUAL_ENV/bin"
+    export POETRY_ACTIVE=1
+    export VIRTUAL_ENV
+}
+
+layout poetry
 export PROJECT_DIR=$(pwd)
 export WEBPORT=$(($RANDOM + 1100))
 export PGPORT=$(($WEBPORT + 100))
 watch_file "$PGDATA/postgresql.conf"
-
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index fc6347a..4d6373f 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -13,6 +13,9 @@ jobs:
       - uses: actions/checkout@v3
       - uses: cachix/install-nix-action@v22
       - uses: DeterminateSystems/magic-nix-cache-action@main
+      - name: Environment setup
+        run: nix develop --command bash -c "poetry install"
       - name: Test
-        run: |
-          nix flake check -L -j auto
+        run: nix develop --command bash -c "dev test"
+        env:
+          PROJECT_DIR: ${{ github.workspace }}
diff --git a/dev.sh b/dev.sh
index 4045f28..4c27edf 100755
--- a/dev.sh
+++ b/dev.sh
@@ -2,12 +2,12 @@
 # Helper functions not exposed to the user
 {
 # Load example data
-_init () {
+_init() {
     python ./src/manage.py loaddata src/network_inventory.yaml
 }

 # Setup the database
-_setup () {
+_setup() {
     overmind start -l db -D
     if [ -f .direnv/first_run ]; then
         sleep 2
@@ -41,15 +41,15 @@ _setup () {
     sleep 2
 }

-_open_url () {
+_open_url() {
     if [[ ! -z "${DEFAULT_BROWSER}" ]]; then
         $DEFAULT_BROWSER $url
-    elif type explorer.exe &> /dev/null; then
+    elif type explorer.exe &>/dev/null; then
         explorer.exe $url
     fi
 }

-_create_url () {
+_create_url() {
     if [ -f /etc/wsl.conf ]; then
         echo "http://localhost:$WEBPORT"
     else
@@ -62,7 +62,7 @@ _create_url () {
 declare -A tasks
 declare -A descriptions

-run () {
+run() {
     _setup
     find . -name __pycache__ -o -name "*.pyc" -delete
     url=$(_create_url)
@@ -76,25 +76,25 @@ tasks["run"]=run
 descriptions["start"]="Alias for run."
 tasks["start"]=run

-stop () {
+stop() {
     overmind quit
 }
 descriptions["stop"]="Stop the webserver and DB."
 tasks["stop"]=stop

-venv () {
+venv() {
     nix build .#venv -o .venv
 }
 descriptions["venv"]="Build a pseudo venv that editors like VS Code can use."
 tasks["venv"]=venv

-build-container (){
-    nix build && docker load < result && docker run --rm -ti network-inventory:latest
+build-container() {
+    nix build && docker load <result && docker run --rm -ti network-inventory:latest
\n\n"
-    for task in "${!tasks[@]}"
-    do
+    for task in "${!tasks[@]}"; do
         echo "$task - ${descriptions[$task]}"
     done
 else
     # Check if task is available
-    if [[ -v "tasks[$1]" ]] ; then
+    if [[ -v "tasks[$1]" ]]; then
         ${tasks["$1"]}
     else
         echo "Task not found."
diff --git a/flake.lock b/flake.lock
index 7dadf77..1006455 100644
--- a/flake.lock
+++ b/flake.lock
@@ -18,45 +18,6 @@
         "type": "github"
       }
     },
-    "flake-utils_2": {
-      "inputs": {
-        "systems": "systems_2"
-      },
-      "locked": {
-        "lastModified": 1687709756,
-        "narHash": "sha256-Y5wKlQSkgEK2weWdOu4J3riRd+kV/VCgHsqLNTTWQ/0=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "dbabf0ca0c0c4bce6ea5eaf65af5cb694d2082c7",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "nix-github-actions": {
-      "inputs": {
-        "nixpkgs": [
-          "poetry2nix",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1690856737,
-        "narHash": "sha256-34M2FVqiKCwFUmA8/LnwxnQYiFZIL4QiNqQ1NRQX2gs=",
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "rev": "bfeb681177b5128d061ebbef7ded30bc21a3f135",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "type": "github"
-      }
-    },
     "nixpkgs": {
       "locked": {
         "lastModified": 1688918189,
@@ -73,33 +34,10 @@
         "type": "github"
       }
     },
-    "poetry2nix": {
-      "inputs": {
-        "flake-utils": "flake-utils_2",
-        "nix-github-actions": "nix-github-actions",
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1693051011,
-        "narHash": "sha256-HNbuVCS/Fnl1YZOjBk9/MlIem+wM8fvIzTH0CVQrLSQ=",
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "rev": "5b3a5151cf212021ff8d424f215fb030e4ff2837",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "type": "github"
-      }
-    },
     "root": {
       "inputs": {
         "flake-utils": "flake-utils",
-        "nixpkgs": "nixpkgs",
-        "poetry2nix": "poetry2nix"
+        "nixpkgs": "nixpkgs"
       }
     },
     "systems": {
@@ -116,21 +54,6 @@
         "repo": "default",
         "type": "github"
       }
-    },
-    "systems_2": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
     }
   },
   "root": "root",
diff --git a/flake.nix b/flake.nix
index c10cefc..cdd656f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -3,34 +3,12 @@
   inputs = {
     nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
     flake-utils.url = "github:numtide/flake-utils";
-    poetry2nix = {
-      url = "github:nix-community/poetry2nix";
-      inputs.nixpkgs.follows = "nixpkgs";
-    };
-  };
-  outputs = { self, nixpkgs, flake-utils, poetry2nix }:
-    {
-      overlays.default = nixpkgs.lib.composeManyExtensions [
-        poetry2nix.overlay
-        (final: prev: rec {
-          inventoryDevEnv = prev.poetry2nix.mkPoetryEnv
-            {
-              projectDir = ./.;
-              groups = [ "main" "dev" ];
-            };
-          inventoryPackage = prev.poetry2nix.mkPoetryApplication {
-            projectDir = ./.;
-            groups = [ "main" ];
-          };
-          inventoryEnv = inventoryPackage.dependencyEnv;
-        })
-      ];
-    } // (flake-utils.lib.eachDefaultSystem (system:
+  };
+  outputs = { self, nixpkgs, flake-utils }:
+    (flake-utils.lib.eachDefaultSystem (system:
       let
         pkgs = import nixpkgs {
           inherit system;
-          overlays = [ self.overlays.default ];
         };
         inventory = pkgs.stdenv.mkDerivation {
           src = ./.;
@@ -45,9 +23,7 @@
       rec {
         devShells.default = pkgs.mkShell {
           buildInputs = [
-            pkgs.inventoryDevEnv
             pkgs.poetry
-            pkgs.python310Packages.pip
             pkgs.overmind
             pkgs.postgresql_15
             (pkgs.writeScriptBin "dev" "${builtins.readFile ./dev.sh}")
@@ -57,44 +33,7 @@
             export DJANGO_SETTINGS_MODULE=network_inventory.settings.local
           '';
         };
-        checks = {
-          lint = pkgs.stdenv.mkDerivation {
-            dontPatch = true;
-            dontConfigure = true;
-            dontBuild = true;
-            dontInstall = true;
-            doCheck = true;
-            name = "lint";
-            src = ./.;
-            checkInputs = [ pkgs.inventoryDevEnv ];
-            checkPhase = ''
-              mkdir -p $out
-              pylint --rc-file pyproject.toml -j 0 -E src/
-              cd src/ && mypy --config-file=../pyproject.toml .
-            '';
-            DJANGO_SETTINGS_MODULE = "network_inventory.settings.ram_test";
-          };
-          tests = pkgs.stdenv.mkDerivation {
-            dontPatch = true;
-            dontConfigure = true;
-            dontBuild = true;
-            dontInstall = true;
-            doCheck = true;
-            name = "test";
-            src = ./.;
-            checkInputs = [ pkgs.inventoryDevEnv pkgs.postgresql_15 pkgs.overmind ];
-            checkPhase = ''
-              mkdir -p $out
-              pytest --ds=network_inventory.settings.ram_test \
-                -nauto \
-                --nomigrations \
-                --cov=./src \
-                ./src
-            '';
-          };
-        };
         packages = {
-          venv = pkgs.inventoryEnv;
           container = pkgs.dockerTools.buildImage {
             name = "network-inventory";
             tag = "latest";
@@ -102,38 +41,39 @@
             copyToRoot = pkgs.buildEnv {
               name = "image-root";
               paths = [
+                inventory
                 pkgs.bashInteractive
                 pkgs.coreutils
-                inventory
+                pkgs.poetry
                 (pkgs.writeShellScriptBin "start-inventory" ''
                   if [ -f .first_run ]; then
                     sleep 2
-                    ${pkgs.inventoryEnv}/bin/django-admin collectstatic --noinput
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations
-                    ${pkgs.inventoryEnv}/bin/django-admin migrate
+                    django-admin collectstatic --noinput
+                    django-admin makemigrations
+                    django-admin migrate
                   else
-                    ${pkgs.inventoryEnv}/bin/django-admin collectstatic --noinput
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations backups
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations computers
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations core
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations customers
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations devices
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations licenses
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations nets
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations softwares
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations users
-                    ${pkgs.inventoryEnv}/bin/django-admin makemigrations
-                    ${pkgs.inventoryEnv}/bin/django-admin migrate
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata backups
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata computers
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata core
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata devices
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata nets
-                    ${pkgs.inventoryEnv}/bin/django-admin loaddata softwares
-                    ${pkgs.inventoryEnv}/bin/django-admin shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', 'admin@example.com', 'password')"
+                    django-admin collectstatic --noinput
+                    django-admin makemigrations backups
+                    django-admin makemigrations computers
+                    django-admin makemigrations core
+                    django-admin makemigrations customers
+                    django-admin makemigrations devices
+                    django-admin makemigrations licenses
+                    django-admin makemigrations nets
+                    django-admin makemigrations softwares
+                    django-admin makemigrations users
+                    django-admin makemigrations
+                    django-admin migrate
+                    django-admin loaddata backups
+                    django-admin loaddata computers
+                    django-admin loaddata core
+                    django-admin loaddata devices
+                    django-admin loaddata nets
+                    django-admin loaddata softwares
+                    django-admin shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', 'admin@example.com', 'password')"
                     touch .first_run
                   fi
-                  ${pkgs.inventoryEnv}/bin/gunicorn network_inventory.wsgi:application --reload --bind 0.0.0.0:8000 --workers 3
+                  gunicorn network_inventory.wsgi:application --reload --bind 0.0.0.0:8000 --workers 3
                 '')
               ];
             };
diff --git a/pyproject.toml b/pyproject.toml
index a6681dd..b5c5745 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,6 @@
 license = "GPLv3"
 packages = [
     { include = "src" },
 ]
-virtualenvs.in-project = true
 [tool.mypy]
 exclude = [