From f742d9efeadbdf74a29ab3bad09682505e72fc92 Mon Sep 17 00:00:00 2001 From: Klaas Hoekema Date: Tue, 10 Dec 2024 00:21:34 -0500 Subject: [PATCH 1/3] Add fixture with sample analysis results Adds some lines to the 'update' script to download a data fixture containing a few neighborhoods and analysis results and import it into the database. Because the import is a bit slow and only needs to be done on initial setup, this also modifies the 'update' script to take a '--load-data' argument, so that it will only load the fixture (and load crash data, which also takes some time and doesn't need to be repeated) on demand, not by default. The fixture itself is on S3, in the 'test-pfb-inputs' bucket, and the script automatically downloads it. Also in S3: the supporting files, like downloadable geometries and GeoJSON files of destination locations, that get saved to S3 and used by the front end in various ways. Those have also been added to the 'test-pfb-inputs' bucket and 'update' has an `aws s3 sync` command to copy them to the user's development S3 bucket. --- .gitignore | 1 + CHANGELOG.md | 2 ++ scripts/update | 28 ++++++++++++++++++++++++++-- 3 files changed, 29 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 0ceb0990..c2b83350 100644 --- a/.gitignore +++ b/.gitignore @@ -73,6 +73,7 @@ coverage.xml # Django stuff: *.log src/django/neighborhood_boundaries/* +src/django/pfb_analysis/fixtures/sample_analysis_results.json.gz # Sphinx documentation docs/_build/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 275388aa..33caa99a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Upcoming release] +- Add fixture with sample analysis results + ## [0.20.0] - 2024-03-04 - Update OSM tags for doctors, dentists, hospitals, and pharmacies diff --git a/scripts/update b/scripts/update index a4daea72..ff029b1b 100755 --- a/scripts/update +++ b/scripts/update @@ -57,8 +57,28 @@ function run_database_migrations() { function run_data_fixtures() { docker compose up -d database check_database + echo "*** Loading score metadata fixture..." docker compose run --rm --entrypoint python3 django manage.py loaddata analysis-score-metadata - docker compose run --rm --entrypoint python3 django manage.py import_crash_data + + if [ -n "${1:-}" ]; then + echo "*** Importing crash data..." + docker compose run --rm --entrypoint python3 django manage.py import_crash_data + + # The sample analysis results fixture takes a few steps + echo "*** Downloading sample analysis results fixture..." + aws --profile pfb s3 sync --exclude "*" --include "sample_analysis_results.json.gz" \ + "s3://test-pfb-inputs/" src/django/pfb_analysis/fixtures/ + echo "*** Loading sample analysis results fixture..." + docker compose run --rm --entrypoint python3 django manage.py loaddata sample_analysis_results + echo "*** Copying S3 files for sample analysis results fixture into your bucket..." 
+ # If we're running on host rather than in Vagrant, source .env to get the DEV_USER variable + if [ -z "${DEV_USER:-}" ]; then + source .env + fi + aws --profile pfb s3 sync --quiet \ + "s3://test-pfb-inputs/fixture_results_files/" \ + "s3://${DEV_USER}-pfb-storage-us-east-1/results/" + fi docker compose stop database } @@ -73,7 +93,11 @@ then docker compose build --pull database django angularjs analysis tilegarden django-q run_database_migrations - run_data_fixtures + if [ "${1:-}" = "--load-data" ]; then + run_data_fixtures true + else + run_data_fixtures + fi echo "Copying angular site to nginx" pushd "src/nginx" From 8430387c90a8ceb6bb697723925dcd2ca19bb9a Mon Sep 17 00:00:00 2001 From: Klaas Hoekema Date: Tue, 10 Dec 2024 09:45:19 -0500 Subject: [PATCH 2/3] Set organization UUID in initial migration We already hard-coded a user UUID in this migration, possibly to facilitate moving fixtures between development environments. This sets the organization UUID to a fixed value as well, to facilitate transferring neighborhoods in a fixture. 
--- scripts/setup | 2 +- src/django/users/migrations/0001_initial.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/setup b/scripts/setup index 8743038b..6562df30 100755 --- a/scripts/setup +++ b/scripts/setup @@ -30,7 +30,7 @@ then vagrant up --provision - vagrant ssh -c "cd /vagrant && ./scripts/update" + vagrant ssh -c "cd /vagrant && ./scripts/update --load-data" popd fi diff --git a/src/django/users/migrations/0001_initial.py b/src/django/users/migrations/0001_initial.py index 3e395e24..cb88631e 100644 --- a/src/django/users/migrations/0001_initial.py +++ b/src/django/users/migrations/0001_initial.py @@ -47,6 +47,7 @@ def add_root_org(apps, schema_editor): root_org = Organization.objects.create(created_by=root_user, modified_by=root_user, name='root', + uuid='7adf4108-f9e4-4f1f-ae9c-4fccf55d9796', org_type='ADMIN', label='Default administrative organization') root_org.save() From 0ed069355dd5560fea2757c8fd654bacac203a53 Mon Sep 17 00:00:00 2001 From: Klaas Hoekema Date: Tue, 17 Dec 2024 14:44:22 -0500 Subject: [PATCH 3/3] Update and clarify README, esp for non-Vagrant users Adds a few details and some additional organization to make the README more accurate and usable, especially for people using docker-on-host rather than Vagrant. --- .gitignore | 3 +++ README.md | 65 ++++++++++++++++++++++++++++++++---------------------- 2 files changed, 42 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index c2b83350..f71b4009 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,9 @@ Thumbs.db #Vagrant .vagrant +#Env file used for docker-on-host +.env + #Ansible deployment/ansible/group_vars/all deployment/ansible/roles/azavea.* diff --git a/README.md b/README.md index 06f287c9..5dd0f25e 100644 --- a/README.md +++ b/README.md @@ -17,30 +17,27 @@ Requirements: 3. Before starting the VM, ensure the ENV variable `PFB_SHARED_FOLDER_TYPE=virtualbox` is set. 
NFS is not supported on windows, so we need to ensure that Vagrant ignores our request for it. 4. Do not use `vagrant reload`. In some cases it will create a new VM rather than autodetecting that the old one exists -#### Notes for non-Windows users +#### Notes for non-Windows Vagrant users 1. An NFS daemon must be running on the host machine. This should be enabled by default on MacOS. Linux computers may require the installation of an additional package such as nfs-kernel-server on Ubuntu. 2. For some commands (e.g., `./scripts/test`), you may need to add the ENV variable `PFB_SHARED_FOLDER_TYPE=virtualbox` for the shared folders to work as expected with Django. -#### Notes for Docker only users +#### Notes for Docker-only users -This mirrors what Jenkins does and may not work for all tasks. +The Vagrant VM provides a few tools and environment variables. To use Docker on host: -1. Setup AWS credentials like below. -2. Setup an export file (or manually) like: +1. Install the AWS CLI on your machine, and set up AWS credentials as described below. +2. Create an `.env` file to provide values for variables that don't have defaults in the docker-compose file: + ```bash + DEV_USER=$USER + AWS_PROFILE=pfb + PFB_AWS_BATCH_ANALYSIS_JOB_QUEUE_NAME='dummy-test-pfb-analysis-job-queue' + PFB_AWS_BATCH_ANALYSIS_JOB_DEFINITION_NAME_REVISION='dummy-test-pfb-analysis-run-job:1' + PFB_AWS_BATCH_ANALYSIS_JOB_DEFINITION_NAME='dummy-test-pfb-analysis-run-job' + ``` + Note: `DEV_USER` doesn't have to be your local username, but that's a convenient default. If you want something different, or if your system doesn't set `$USER` in your environment, just hard-code a (unique) value of your choice. 
-```bash -export AWS_DEFAULT_REGION="us-east-1" -export PFB_SETTINGS_BUCKET="staging-pfb-config-us-east-1" -export PFB_S3STORAGE_BUCKET="staging-pfb-static-us-east-1" -export PFB_AWS_BATCH_ANALYSIS_JOB_QUEUE_NAME="dummy-test-pfb-analysis-job-queue" -export PFB_AWS_BATCH_ANALYSIS_JOB_DEFINITION_NAME_REVISION="dummy-test-pfb-analysis-run-job:1" -export AWS_PROFILE=pfb -export GIT_COMMIT=0577186 -export BATCH_ANALYSIS_JOB_NAME_REVISION="dummy-test-pfb-analysis-run-job:1" -``` - -3. Use update and server STRTA (do not use setup) +3. Ignore `./scripts/setup`, which provisions the Vagrant VM, and just use `./scripts/update` ### Setting up AWS credentials @@ -73,29 +70,45 @@ Run `./scripts/setup` to install project dependencies and prepare the developmen vagrant ssh ``` -Once in the VM, if you added AWS credentials above, run the following commands to configure your development S3 buckets: +### Creating your development S3 bucket + +If you added AWS credentials above, run the following commands to configure your development S3 bucket. If you're running without Vagrant, either replace `${DEV_USER}` on the first line with the value you're using, or run `source .env` to set it in your shell. 
``` -aws s3api create-bucket --bucket "${DEV_USER}-pfb-storage-us-east-1" -aws s3api put-bucket-policy --bucket "${DEV_USER}-pfb-storage-us-east-1" --policy "{\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":\"*\",\"Action\":\"s3:GetObject\",\"Resource\":\"arn:aws:s3:::${DEV_USER}-pfb-storage-us-east-1/*\"}]}" +export PFB_DEV_BUCKET="${DEV_USER}-pfb-storage-us-east-1" +aws s3api create-bucket --bucket $PFB_DEV_BUCKET +aws s3api put-bucket-policy --bucket $PFB_DEV_BUCKET --policy "{\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":\"*\",\"Action\":\"s3:GetObject\",\"Resource\":\"arn:aws:s3:::${PFB_DEV_BUCKET}/*\"}]}" +aws s3api put-bucket-cors --bucket $PFB_DEV_BUCKET --cors-configuration "{\"CORSRules\":[{\"AllowedHeaders\":[\"Authorization\"],\"AllowedMethods\":[\"GET\"],\"AllowedOrigins\":[\"*\"],\"ExposeHeaders\":[],\"MaxAgeSeconds\":3000}]}" ``` At this point, if you only intend to run the 'Bike Network Analysis', skip directly to [Running the Analysis](#running-the-analysis) -To start the application containers (from within the Vagrant VM): +### Running the development server + +The following commands should be run from within the Vagrant VM if you're using it, or from the project root directory on your host machine if not. 
+ +For initial setup or to apply dependency or database updates, run: ``` -./scripts/server +./scripts/update --load-data ``` -In order to use the API, you'll need to run migrations on the Django app server: +To start the application containers, run: ``` -./scripts/django-manage migrate +./scripts/server +``` + +### Using the development app -This will add a default admin user that can log in to http://localhost:9200/api/ as: -systems+pfb@azavea.com / root +The migrations that get run by `scripts/update` will add a default admin user: ``` +Username: systems+pfb@azavea.com +Password: root +``` + +These credentials will work to log in to either the front-end admin (http://localhost:9301/#/login/) or the Django Rest Framework development interface (http://localhost:9200/api/). + ## Ports