#!make
# Default values; they can be overridden either on the make command line
# or in .env
.PHONY: version vars init-venv build build-travis init init-db reset \
up down logs restart restart-web \
superadmin collectstatic migrations migrate messages compilemessages \
dump restore release push-qa push-prod update-changelog deploy \
fast-test test test-render test-travis stop coverage shell bash \
migration-load-dump migration-build-delta \
migration-post-generate-csv migration-migrate \
migration-migrate-selective-with-subset migration-migrate-all \
migration-post-generate-csv-all setup-openshift check-env show-app-url
VERSION:=$(shell python update_release.py -v)
version:
@echo VERSION set to $(VERSION)
vars:
@echo 'Used by App:'
@echo ' SECRET_KEY=${SECRET_KEY}'
@echo ' DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE}'
@echo ' SERVER_HOST=${SERVER_HOST}'
@echo ' DEV_PORT=${DEV_PORT}'
@echo ' SITE_PATH=${SITE_PATH}'
@echo ' SITE_URL=${SITE_URL}'
@echo ' ALLOWED_HOSTS=${ALLOWED_HOSTS}'
@echo ' DATABASE_URL=${DATABASE_URL}'
@echo ''
@echo 'Used by Makefile'
@echo ' SUPER_ADMIN_USERNAME=${SUPER_ADMIN_USERNAME}'
@echo ' SUPER_ADMIN_EMAIL=${SUPER_ADMIN_EMAIL}'
@echo ' GITHUB_OWNER=${GITHUB_OWNER}'
@echo ' GITHUB_REPO=${GITHUB_REPO}'
@echo ' GITHUB_USER=${GITHUB_USER}'
@echo ' GITHUB_KEY=${GITHUB_KEY}'
@echo ' DATABASE_USER=${DATABASE_USER}'
@echo ' DATABASE_PASSWORD=xxx'
@echo ' DB_NAME=${DB_NAME}'
@echo ''
@echo 'Defined as helpers'
@echo ' DB_URL=${DB_URL}'
init-venv:
ifeq ($(wildcard .env),)
cp env/django.env .env
echo SECRET_KEY=`openssl rand -base64 32` >> .env
echo PYTHONPATH=`pwd`/infoscience_exports >> .env
@echo "! Set up your .env file before running"
endif
@echo "! If you want a clean state from a docker standpoint, run"
@echo "! $$ make reset"
build:
# updating requirements
pipenv lock
# build docker image
docker compose -f docker-compose-dev.yml stop
docker compose -f docker-compose-dev.yml build
init-db:
# create DB
docker compose -f docker-compose-dev.yml exec postgres \
psql -c 'CREATE DATABASE "${DB_NAME}";' -U postgres
# create DB user for app
docker compose -f docker-compose-dev.yml exec postgres \
psql ${DB_NAME} -c "CREATE USER ${DATABASE_USER} WITH PASSWORD '${DATABASE_PASSWORD}';" -U postgres
docker compose -f docker-compose-dev.yml exec postgres \
psql ${DB_NAME} -c "ALTER ROLE ${DATABASE_USER} WITH CREATEDB;" -U postgres
# initialize the DB by running the migration scripts
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py makemigrations
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py migrate
# create the cache table
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py createcachetable
# create super admin in app
make superadmin
@echo " -> All set up! You can connect with your tequila acount or the admin (${SUPER_ADMIN_EMAIL})"
init: reset
reset: build up
sleep 3
make init-db
make collectstatic
make compilemessages
up:
docker compose -f docker-compose-dev.yml up -d
stop:
docker compose -f docker-compose-dev.yml stop
down:
@echo -n "Are you sure? The db will be lost [y/N] " && read ans && [ $${ans:-N} = y ]
docker compose -f docker-compose-dev.yml down
logs:
docker compose -f docker-compose-dev.yml logs -f
restart:
docker compose -f docker-compose-dev.yml stop
docker compose -f docker-compose-dev.yml up -d
docker compose -f docker-compose-dev.yml logs
restart-web:
docker compose -f docker-compose-dev.yml stop web
docker compose -f docker-compose-dev.yml start web
superadmin:
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py shell -c "from django.contrib.auth import get_user_model; \
User = get_user_model(); \
User.objects.filter(email='${SUPER_ADMIN_EMAIL}').delete(); \
User.objects.create_superuser('${SUPER_ADMIN_USERNAME}', '${SUPER_ADMIN_EMAIL}', '${SUPER_ADMIN_PASSWORD}');"
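# Note: superadmin first deletes any existing user with SUPER_ADMIN_EMAIL,
# so re-running it simply recreates the account.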
collectstatic: up
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py collectstatic --noinput
migrations: up
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py makemigrations
migrate: up
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py migrate
messages: up
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py makemessages --all
compilemessages: up
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py compilemessages
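# messages extracts translatable strings into .po files; compilemessages
# compiles them into the .mo files Django uses at runtime.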
dump:
@echo dumping DB on last commit `git rev-parse --verify HEAD`
docker compose -f docker-compose-dev.yml run --rm \
-v $(shell pwd)/backup/:/backup \
postgres sh -c 'exec pg_dump -C -hpostgres -Upostgres -Ox -Ft \
-f/backup/$(shell date +"%F:%T")-$(shell git rev-parse --verify HEAD).sql.tar -d${DB_NAME}'
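# Dumps land in ./backup/ as <timestamp>-<commit-hash>.sql.tar, which lets
# `make restore` (below) check out the matching commit before restoring.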
restore:
@echo restoring DB from file `ls -t backup/*.sql.tar | head -1`
# retrieve the commit hash from the dump filename and check it out
git checkout $(shell ls -t backup/*.sql.tar | head -1 | cut -d'-' -f4 | cut -d '.' -f1)
# restore DB
docker compose -f docker-compose-dev.yml run --rm \
-v $(shell pwd)/backup/:/backup \
postgres sh -c 'exec pg_restore -c -hpostgres -U${DATABASE_USER} -Ox -Ft -d${DB_NAME} `ls -t /backup/*.sql.tar | head -1`'
# restart web container
make restart-web
release:
# make sure we are on master
python update_release.py check --branch=master
# update versions and ask for confirmation
python update_release.py
python update_release.py confirm
# create branch and tag
git checkout -b release-$(VERSION)
git add .
git commit -m "Prepared release $(VERSION)"
git push --set-upstream origin release-$(VERSION)
git tag $(VERSION)
git tag -f qa-release
git push --tags --force
# updating CHANGELOG
make update-changelog
# create github release
python update_release.py publish
# revert the local pre-update of versions.py
git checkout infoscience_exports/exports/versions.py
# merge the release branch back into master
git checkout master
git merge release-$(VERSION)
git push
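# Release flow, as encoded above and below: `make release` branches, tags and
# publishes a new version before merging it back into master; push-qa and
# push-prod only move the qa-release / prod-release tags and refresh the CHANGELOG.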
push-qa:
# update tags
git tag -f qa-release
git push --tags --force
# updating CHANGELOG
make update-changelog
push-prod:
@# confirm push to production
@python update_release.py confirm --prod
# update tags
git tag -f prod-release
git push --tags --force
# updating CHANGELOG
make update-changelog
update-changelog:
# updating CHANGELOG
github_changelog_generator -u ${GITHUB_OWNER} -p ${GITHUB_REPO}
# commit the updated CHANGELOG and push to master
git add CHANGELOG.md
git commit -m "updated CHANGELOG"
git push
deploy: dump
git pull
# update docker images
docker compose -f docker-compose-dev.yml build
# restart containers
make restart
# update DB
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py migrate
# update static files and translations
make collectstatic
make compilemessages
# restart web container
make restart-web
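# Test targets: fast-test is the quick inner loop (--failfast, --keepdb);
# test also runs flake8 before the full suite.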
fast-test: check-env
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py test exports --settings=settings.test --noinput --failfast --keepdb
test: check-env
docker compose -f docker-compose-dev.yml exec web \
flake8 infoscience_exports/exports --max-line-length=120 --exclude=migrations
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py test exports --settings=settings.test --noinput
test-render: check-env
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py test --failfast exports.test.test_crud_views:ExportRenderTest --settings=settings.test-silent-coverage --noinput
shell:
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py shell_plus
bash:
docker compose -f docker-compose-dev.yml exec web \
bash
coverage: check-env
flake8 infoscience_exports/exports --max-line-length=120 --exclude=migrations
pytest --cov=infoscience_exports infoscience_exports/exports/pytests
coverage html
open htmlcov/index.html
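# Note: unlike the targets above, coverage runs flake8 and pytest directly on
# the host rather than inside the web container.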
build-travis:
docker compose -f docker-compose-dev.yml build
docker compose -f docker-compose-dev.yml up -d
test-travis:
flake8 infoscience_exports/exports --max-line-length=120 --exclude=migrations
python infoscience_exports/manage.py test exports --settings=settings.test --noinput
coverage xml
migration-load-dump:
docker compose -f docker-compose-dev.yml exec web \
python infoscience_exports/manage.py loaddata --app exporter exports_from_32
migration-build-delta:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py add_quality_content_comparaison
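# The migrate_from_legacy variants below differ only in the flags they pass:
#   migration-migrate                        uses ids_to_migrate.csv
#   migration-migrate-selective-with-subset  same ids, plus --subset_only 1
#   migration-migrate-all                    no id list, passes --migrate_all 1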
migration-migrate:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py \
migrate_from_legacy --jahia_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-prod-jahia.csv.extended.csv \
--people_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-people-actif-only.csv.extended.csv \
--ids_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/ids_to_migrate.csv
migration-migrate-selective-with-subset:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py \
migrate_from_legacy --jahia_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-prod-jahia.csv.extended.csv \
--people_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-people-actif-only.csv.extended.csv \
--ids_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/ids_to_migrate.csv \
--subset_only 1
migration-migrate-all:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py \
migrate_from_legacy --jahia_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-prod-jahia.csv.extended.csv \
--people_csv_path /usr/src/app/infoscience_exports/exporter/fixtures/infoscience-people-actif-only.csv.extended.csv \
--migrate_all 1
migration-post-generate-csv:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py legacy_url_old_to_new \
--ids_csv_path "/usr/src/app/infoscience_exports/exporter/fixtures/ids_to_migrate.csv" \
--jahia_csv_path "/var/log/django/infoscience_exports_new_url_jahia.csv" \
--people_csv_path "/var/log/django/infoscience_exports_new_url_people.csv " \
--all_csv_path "/var/log/django/infoscience_exports_all_new_url.csv"
migration-post-generate-csv-all:
docker compose -f docker-compose-dev.yml exec web python infoscience_exports/manage.py legacy_url_old_to_new \
--jahia_csv_path "/var/log/django/infoscience_exports_new_url_jahia.csv" \
--people_csv_path "/var/log/django/infoscience_exports_new_url_people.csv " \
--all_csv_path "/var/log/django/infoscience_exports_all_new_url.csv"
setup-openshift: check-env
./ansible/exportsible
check-env:
ifeq ($(wildcard .env),)
@echo "Please create your .env file first, from .env.sample or by running make venv"
@exit 1
else
include .env
export
endif
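# check-env is a prerequisite of the test targets and setup-openshift: it
# aborts when .env is missing, otherwise it includes .env and exports its variables.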
show-app-url:
@echo '${SITE_URL}'