-
Notifications
You must be signed in to change notification settings - Fork 14
/
Copy pathMakefile
698 lines (587 loc) · 28.7 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
##############################################
# WARNING : THIS FILE SHOULDN'T BE TOUCHED   #
#    FOR ENVIRONMENT CONFIGURATION           #
# CONFIGURABLE VARIABLES SHOULD BE           #
# OVERRIDDEN IN THE 'artifacts' FILE,        #
# WHICH IS NOT COMMITTED                     #
##############################################
# recipes rely on bashisms such as ((timeout--)) — keep bash as the recipe shell
SHELL=/bin/bash
# DEB vs RPM package family, detected from /etc/os-release (case-insensitive match)
OS_TYPE := $(shell grep -E '^NAME=' /etc/os-release | sed 's/^.*debian.*$$/DEB/I;s/^.*ubuntu.*$$/DEB/I;s/^.*fedora.*$$/RPM/I;s/.*centos.*$$/RPM/I;')
export APP_DNS=tuto.matchid.io
export DEBIAN_FRONTEND=noninteractive
# pass "-t" to docker exec only when stdout is a tty
# fix: the previous `USE_TTY="-t"` assigned inside the $(shell) subshell and
# printed nothing, so USE_TTY was always empty — echo the flag instead
export USE_TTY := $(shell test -t 1 && echo "-t")
#matchID default exposition port
export APP_GROUP=matchID
export APP=backend
export APP_PATH=$(shell pwd)
export API_PATH=${APP_GROUP}/api/v0
export API_TEST_PATH=${API_PATH}/swagger.json
export API_TEST_JSON_PATH=swagger
export PORT=8081
export BACKEND_PORT=8081
# seconds to wait for a service API to come up
export TIMEOUT=30
# seconds to wait for a snapshot/backup to complete
export BACKUP_TIMEOUT=1800
# auth method - do not use auth by default (auth can be both passwords and OAuth)
export NO_AUTH=True
export TWITTER_OAUTH_ID=None
export TWITTER_OAUTH_SECRET=None
export FACEBOOK_OAUTH_ID=None
export FACEBOOK_OAUTH_SECRET=None
# SECURITY NOTE(review): committed OAuth credentials below — these should be
# revoked and supplied through the non-committed 'artifacts' file instead
export GITHUB_OAUTH_ID=fd8e86cc09d3f9607e16
export GITHUB_OAUTH_SECRET=203010f81158d3ceab0297a213e80bc0fbfe7f8e
#matchID default paths
export BACKEND := $(shell pwd)
export UPLOAD=${BACKEND}/upload
export PROJECTS=${BACKEND}/projects
export EXAMPLES=${BACKEND}/../examples
export TUTORIAL=${BACKEND}/../tutorial
export MODELS=${BACKEND}/models
export LOG=${BACKEND}/log
export COMPOSE_HTTP_TIMEOUT=120
export DOCKER_USERNAME=$(shell echo ${APP_GROUP} | tr '[:upper:]' '[:lower:]')
export DC_DIR=${BACKEND}/docker-components
export DC_FILE=${DC_DIR}/docker-compose
export DC_PREFIX := $(shell echo ${APP_GROUP} | tr '[:upper:]' '[:lower:]')
export DC_IMAGE_NAME=${DC_PREFIX}-${APP}
export DC_NETWORK=${DC_PREFIX}
export DC_NETWORK_OPT=
export DC_BUILD_ARGS = --pull --no-cache
export GIT_ROOT=https://github.com/matchid-project
export GIT_ORIGIN=origin
export GIT_BRANCH := $(shell [ -f "/usr/bin/git" ] && git branch | grep '*' | awk '{print $$2}')
export GIT_BRANCH_MASTER=master
export GIT_TOOLS=tools
export GIT_FRONTEND=frontend
# frontend tracks master only when the backend is on master, dev otherwise
export GIT_FRONTEND_BRANCH:=$(shell [ "${GIT_BRANCH}" = "${GIT_BRANCH_MASTER}" ] && echo -n "${GIT_BRANCH_MASTER}" || echo -n dev)
export FRONTEND=${BACKEND}/../${GIT_FRONTEND}
export FRONTEND_DC_IMAGE_NAME=${DC_PREFIX}-${GIT_FRONTEND}
# random secrets, regenerated at every make invocation
export API_SECRET_KEY:=$(shell openssl rand -base64 24)
export ADMIN_PASSWORD:=$(shell openssl rand -base64 24)
export ADMIN_PASSWORD_HASH:=$(shell echo -n ${ADMIN_PASSWORD} | sha384sum | sed 's/\s*\-.*//')
export POSTGRES_PASSWORD=matchid
# backup dir
export BACKUP_DIR=${BACKEND}/backup
# s3 conf
# s3 conf has to be stored in two ways :
# classic way (.aws/config and .aws/credentials) for s3 backups
# to use within matchid backend, you have to add credential as env variables and declare configuration in a s3 connector
# export aws_access_key_id=XXXXXXXXXXXXXXXXX
# export aws_secret_access_key=XXXXXXXXXXXXXXXXXXXXXXXXXXX
export MATCHID_DATA_BUCKET=$(shell echo ${APP_GROUP} | tr '[:upper:]' '[:lower:]')
export MATCHID_CONFIG_BUCKET=$(shell echo ${APP_GROUP} | tr '[:upper:]' '[:lower:]')
# elasticsearch default configuration
export ES_INDEX=${APP_GROUP}
# NOTE: comments are kept on their own lines below — an inline "value # comment"
# assignment keeps the whitespace before '#' as part of the value, which broke
# the `ifeq "$(ES_NODES)" "1"` comparison and injected stray spaces through sed
# elasticsearch number of nodes
export ES_NODES = 1
# elasticsearch number of nodes per swarm node
export ES_SWARM_NODE_NUMBER = 2
# elasticsearch : memory of each node
export ES_MEM := 1024m
# elasticsearch : java options
export ES_JAVA_OPTS := -Xms${ES_MEM} -Xmx${ES_MEM}
export ES_VERSION = 8.6.1
export ES_DATA = ${BACKEND}/esdata
export ES_THREADS = 2
export ES_MAX_TRIES = 3
export ES_CHUNK = 500
export ES_BACKUP_NAME := $(shell echo esdata_`date +"%Y%m%d"`)
export ES_BACKUP_FILE := ${ES_BACKUP_NAME}.tar
export ES_BACKUP_FILE_SNAR = ${ES_BACKUP_NAME}.snar
export DB_SERVICES=elasticsearch postgres
export SERVICES=${DB_SERVICES} backend frontend
# optional deployment overrides; leading "-" ignores a missing file
-include ${APP_PATH}/${GIT_TOOLS}/artifacts.SCW
dummy := $(shell touch artifacts)
include ./artifacts
# APP_VERSION = git tag + checksum of the tracked source files (tagfiles.version)
tag := $(shell [ -f "/usr/bin/git" ] && git describe --tags | sed 's/-.*//')
version := $(shell export LC_COLLATE=C;export LC_ALL=C;cat tagfiles.version | xargs -I '{}' find {} -type f | egrep -v 'conf/security/(github|facebook|twitter).yml$$|.tar.gz$$|.pyc$$|.gitignore$$' | sort | xargs cat | sha1sum - | sed 's/\(......\).*/\1/')
export APP_VERSION = ${tag}-${version}
commit = ${APP_VERSION}
lastcommit := $(shell touch .lastcommit && cat .lastcommit)
date := $(shell date -I)
id := $(shell openssl rand -base64 8)
# non-empty when vm.max_map_count is already configured for elasticsearch
# fix: the pattern is now quoted so `\s` reaches egrep instead of being
# stripped by the shell
vm_max_count := $(shell egrep 'vm.max_map_count\s*=\s*262144' /etc/sysctl.conf && echo true)
PG := 'postgres'
DC := 'docker-compose'
# imports $NAME, $VERSION_ID, … from the host OS
include /etc/os-release
# decode CI-provided secrets into local files: a docker-compose override and
# gpg-encrypted OAuth credentials
# NOTE(review): DC_LOCAL, OAUTH_CREDS_ENC and SSHPWD are not defined in this
# file — presumably injected by CI; confirm before relying on this target
# NOTE(review): no .PHONY declarations anywhere in this file — a file named
# "test" would shadow this target
test:
	echo "${DC_LOCAL}" | base64 -d > docker-compose-local.yml;\
	echo "${OAUTH_CREDS_ENC}" | base64 -d | gpg -d --passphrase ${SSHPWD} --batch > creds-local.yml
# print group/app/version (frontend version first, when present)
version: frontend-version
	@echo ${APP_GROUP} ${APP} ${APP_VERSION}
# print the frontend version if the frontend checkout exists beside the backend
frontend-version:
	@if [ -d "${FRONTEND}" ];then\
	cd ${FRONTEND} && make -s version;\
	fi
# list the files whose content feeds the APP_VERSION checksum (same filter as
# the `version` parse-time variable above)
version-files:
	@export LC_COLLATE=C;export LC_ALL=C;cat tagfiles.version | xargs -I '{}' find {} -type f | egrep -v 'conf/security/(github|facebook|twitter).yml$$|.tar.gz$$|.pyc$$|.gitignore$$' | sort
# bootstrap: install git if missing, fetch (or link) the shared tools repo,
# propagate the artifacts file, and stamp "config" so this runs once
config:
	@if [ ! -f "/usr/bin/git" ];then\
	if [ "${OS_TYPE}" = "DEB" ]; then\
	sudo apt-get install git -yq;\
	fi;\
	if [ "${OS_TYPE}" = "RPM" ]; then\
	sudo yum install -y git;\
	fi;\
	fi
	@if [ -z "${TOOLS_PATH}" ];then\
	if [ ! -e "${APP_PATH}/${GIT_TOOLS}" ]; then\
	git clone -q ${GIT_ROOT}/${GIT_TOOLS};\
	fi;\
	$(MAKE) -C ${APP_PATH}/${GIT_TOOLS} config ${MAKEOVERRIDES};\
	else\
	ln -sfn ${TOOLS_PATH} ${APP_PATH}/${GIT_TOOLS};\
	fi
	cp artifacts ${APP_PATH}/${GIT_TOOLS}/
	@touch config
# remove the tools checkout/link and the config stamp
config-clean:
	@rm -rf tools config
# remove named containers (fails if any container does not exist)
# NOTE(review): container names are hard-coded here while other targets derive
# them from DC_PREFIX — confirm they match the compose files
docker-clean: stop
	docker container rm matchid-build-front matchid-nginx elasticsearch postgres kibana
clean: frontend-clean config-clean
# tear the shared docker network down
network-stop:
	docker network rm ${DC_NETWORK}
# create the shared docker network; trailing "true" swallows the error when
# the network already exists
network: config
	@docker network create ${DC_NETWORK_OPT} ${DC_NETWORK} 2> /dev/null; true
elasticsearch-dev-stop: elasticsearch-stop
# ensure the elasticsearch image is available locally: inspect first, pull as
# fallback, fail loudly otherwise; a stamp file caches a successful check
elasticsearch-docker-check:
	@if [ ! -f ".docker.elastic.co-elasticsearch:${ES_VERSION}" ]; then\
	(\
	(docker image inspect docker.elastic.co/elasticsearch/elasticsearch:${ES_VERSION} > /dev/null 2>&1)\
	&& touch .docker.elastic.co-elasticsearch:${ES_VERSION}\
	)\
	||\
	(\
	(docker pull docker.elastic.co/elasticsearch/elasticsearch:${ES_VERSION} 2> /dev/null)\
	&& touch .docker.elastic.co-elasticsearch:${ES_VERSION}\
	)\
	|| (echo no image found for docker.elastic.co/elasticsearch/elasticsearch:${ES_VERSION} && exit 1);\
	fi;
# stop the elasticsearch stack; the compose file depends on the cluster size
# NOTE(review): ES_NODES is compared against "1" at parse time — stray
# whitespace in the value silently selects the multi-node branch
elasticsearch-stop:
	@echo docker-compose down matchID elasticsearch
ifeq "$(ES_NODES)" "1"
	@${DC} -f ${DC_FILE}-elasticsearch-phonetic.yml down
else
	@${DC} -f ${DC_FILE}-elasticsearch-huge.yml down
endif
# stop the second (remote/swarm) elasticsearch cluster
elasticsearch2-stop:
	@${DC} -f ${DC_FILE}-elasticsearch-huge-remote.yml down
# one-shot (stamped): load the s3 credentials into the elasticsearch keystore,
# restart the node, and wait up to TIMEOUT s for the HTTP API to answer
# NOTE(review): STORAGE_ACCESS_KEY/STORAGE_SECRET_KEY come from the artifacts
# include — not defined in this file
elasticsearch-repository-plugin: elasticsearch-start
	@if [ ! -f "elasticsearch-repository-plugin" ]; then\
	echo installing elasticsearch repository plugin;\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch sh -c \
	"echo ${STORAGE_ACCESS_KEY} | bin/elasticsearch-keystore add --stdin --force s3.client.default.access_key";\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch sh -c \
	"echo ${STORAGE_SECRET_KEY} | bin/elasticsearch-keystore add --stdin --force s3.client.default.secret_key";\
	docker restart ${DC_PREFIX}-elasticsearch;\
	timeout=${TIMEOUT} ; ret=1 ; until [ "$$timeout" -le 0 -o "$$ret" -eq "0" ] ; do (docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch curl -s --fail -XGET localhost:9200/ > /dev/null) ; ret=$$? ; if [ "$$ret" -ne "0" ] ; then echo -en "\rwaiting for elasticsearch API to start $$timeout" ; fi ; ((timeout--)); sleep 1 ; done ;\
	echo; touch elasticsearch-repository-plugin ; exit $$ret;\
	fi;
# one-shot (stamped): register the s3 snapshot repository named ${APP_GROUP}
elasticsearch-repository-config: elasticsearch-repository-plugin
	@if [ ! -f "elasticsearch-repository-config" ]; then\
	echo creating elasticsearch repository ${APP_GROUP} in s3 bucket ${REPOSITORY_BUCKET} && \
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XPUT "localhost:9200/_snapshot/${APP_GROUP}" -H 'Content-Type: application/json' \
	-d '{"type": "s3","settings": {"bucket": "${REPOSITORY_BUCKET}","client": "default","region": "${SCW_REGION}","endpoint": "${SCW_ENDPOINT}","path_style_access": true,"protocol": "https"}}' \
	| grep -q '"acknowledged":true' && touch elasticsearch-repository-config;\
	fi
# set index.blocks.write=true on ES_INDEX so a consistent snapshot can be
# taken; reports success/failure but never fails the make run
elasticsearch-freeze:
	@\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XPUT "localhost:9200/${ES_INDEX}/_settings" -H 'Content-Type: application/json' \
	-d '{"index":{"blocks.write": true}}' | grep -q '"acknowledged":true' \
	&& echo "index ${ES_INDEX} frozen" \
	|| echo "index ${ES_INDEX} freeze failed"
# snapshot ES_INDEX into the s3-backed repository, then poll (up to
# BACKUP_TIMEOUT, decremented twice per 2s sleep) until the snapshot state is
# SUCCESS; prints a dot every second and a reminder line every 10 dots;
# stamps "elasticsearch-repository-backup" on success
elasticsearch-repository-backup: elasticsearch-repository-config elasticsearch-freeze
	@\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XPUT "localhost:9200/_snapshot/${APP_GROUP}/${ES_BACKUP_NAME}" -H 'Content-Type: application/json'\
	-d '{"indices": "${ES_INDEX}", "ignore_unavailable": true, "include_global_state": false}' \
	| grep -q '{"accepted":true}';\
	if [ "$$?" -ne "0" ]; then\
	echo "snapshot ${ES_BACKUP_NAME} creation failed";\
	exit 1;\
	fi;\
	echo -n creating snapshot ${ES_BACKUP_NAME} in elasticsearch repository;\
	timeout=${BACKUP_TIMEOUT} ; ret=1 ; dot_count=0 ;\
	until [ "$$timeout" -le 0 -o "$$ret" -eq "0" ] ; do\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XGET "localhost:9200/_snapshot/${APP_GROUP}/${ES_BACKUP_NAME}"\
	| grep -q '"state":"SUCCESS"';\
	ret=$$? ; \
	if [ "$$ret" -ne "0" ] ; then\
	echo -en "." ; \
	((dot_count++));\
	if [ "$$dot_count" -gt "10" ]; then\
	echo -en "\rwaiting for snapshot ${ES_BACKUP_NAME} to complete $$timeout" ;\
	dot_count=0;\
	fi;\
	fi ;\
	((timeout--));((timeout--)); sleep 2 ; \
	done ; echo ;\
	if [ "$$ret" -ne "0" ]; then\
	echo "snapshot ${ES_BACKUP_NAME} creation failed";\
	exit $$ret;\
	fi;\
	echo "snapshot ${ES_BACKUP_NAME} created in elasticsearch repository" && touch elasticsearch-repository-backup
# fire-and-forget snapshot creation (no polling, no stamp)
elasticsearch-repository-backup-async: elasticsearch-repository-config
	@docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XPUT "localhost:9200/_snapshot/${APP_GROUP}/${ES_BACKUP_NAME}" -H 'Content-Type: application/json'\
	-d '{"indices": "${ES_INDEX}", "ignore_unavailable": true, "include_global_state": false}'
# delete the snapshot from the repository (curl output and errors discarded)
elasticsearch-repository-delete: elasticsearch-repository-config
	@(\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XDELETE "localhost:9200/_snapshot/${APP_GROUP}/${ES_BACKUP_NAME}"\
	> /dev/null 2>&1\
	) && echo "snapshot ${ES_BACKUP_NAME} deleted from elasticsearch repository"
# list all snapshot names in the repository (requires jq inside the container)
elasticsearch-repository-list: elasticsearch-repository-config
	@docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XGET "localhost:9200/_snapshot/${APP_GROUP}/_all"\
	| jq -r '.snapshots[].snapshot'
# restore ES_INDEX from the snapshot, waiting for completion; stamped on
# success. NOTE(review): curl errors are discarded, so a failed restore shows
# only as a missing success message
elasticsearch-repository-restore: elasticsearch-repository-config
	@echo restoring snapshot ${ES_BACKUP_NAME} from elasticsearch repository;\
	(\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XPOST localhost:9200/_snapshot/${APP_GROUP}/${ES_BACKUP_NAME}/_restore?wait_for_completion=true -H 'Content-Type: application/json'\
	-d '{"indices": "${ES_INDEX}","ignore_unavailable": true,"include_global_state": false}' \
	> /dev/null 2>&1\
	) && echo "snapshot ${ES_BACKUP_NAME} restored from elasticsearch repository" && touch elasticsearch-repository-restore
# verify that the snapshot exists in the elasticsearch repository; drops a
# per-backup stamp file under BACKUP_DIR so the check runs only once.
# Fixes: `mkdir -p touch <f>` created directories (so the -f guard never
# matched) — replaced with a plain `touch` (backup-dir guarantees the dir);
# added the missing ';' before `fi` (the backslash continuation joined
# `)` and `fi` into a shell syntax error); fixed the "found for or" typo.
elasticsearch-repository-check: elasticsearch-repository-config backup-dir
	@if [ ! -f "${BACKUP_DIR}/${ES_BACKUP_NAME}.check" ]; then\
	(\
	docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch \
	curl -s -XGET "localhost:9200/_snapshot/${APP_GROUP}/_all" \
	| jq -r '.snapshots[].snapshot' | grep -q "${ES_BACKUP_NAME}" \
	) > /dev/null 2>&1 \
	&& echo "snapshot found for ${ES_BACKUP_NAME} in elasticsearch repository" && touch "${BACKUP_DIR}/${ES_BACKUP_NAME}.check" \
	|| (echo "no snapshot found for ${ES_BACKUP_NAME} in elasticsearch repository");\
	fi
# cold backup: stop elasticsearch, then tar ES_DATA (incremental via a .snar
# index) into BACKUP_DIR
elasticsearch-backup: elasticsearch-stop backup-dir
	@echo taring ${ES_DATA} to ${BACKUP_DIR}/${ES_BACKUP_FILE}
	@cd $$(dirname ${ES_DATA}) && sudo tar --create --file=${BACKUP_DIR}/${ES_BACKUP_FILE} --listed-incremental=${BACKUP_DIR}/${ES_BACKUP_FILE_SNAR} $$(basename ${ES_DATA})
# cold restore: wipe ES_DATA entirely, then extract the archive back in place
elasticsearch-restore: elasticsearch-stop backup-dir
	@if [ -d "$(ES_DATA)" ] ; then (echo purgin ${ES_DATA} && sudo rm -rf ${ES_DATA} && echo purge done) ; fi
	@if [ ! -f "${BACKUP_DIR}/${ES_BACKUP_FILE}" ] ; then (echo no such archive "${BACKUP_DIR}/${ES_BACKUP_FILE}" && exit 1);fi
	@echo restoring from ${BACKUP_DIR}/${ES_BACKUP_FILE} to ${ES_DATA} && \
	cd $$(dirname ${ES_DATA}) && \
	sudo tar --extract --listed-incremental=/dev/null --file ${BACKUP_DIR}/${ES_BACKUP_FILE} && \
	echo backup restored
# push the tar archive and its .snar incremental index to remote storage,
# delegating to the tools repo's storage-push target
elasticsearch-storage-push:
	@if [ ! -f "${BACKUP_DIR}/${ES_BACKUP_FILE}" ] ; then (echo no archive to push: "${BACKUP_DIR}/${ES_BACKUP_FILE}" && exit 1);fi
	@make -C ${APP_PATH}/${GIT_TOOLS} storage-push\
	FILE=${BACKUP_DIR}/${ES_BACKUP_FILE}\
	STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}
	@make -C ${APP_PATH}/${GIT_TOOLS} storage-push\
	FILE=${BACKUP_DIR}/${ES_BACKUP_FILE_SNAR}\
	STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}
# fetch the elasticsearch backup archive from remote storage into BACKUP_DIR.
# Fix: the progress message referenced ${BUCKET}, which is never set in this
# file — the pull actually uses ${STORAGE_BUCKET}, so echo that instead
elasticsearch-storage-pull: backup-dir
	@echo pulling ${STORAGE_BUCKET}/${ES_BACKUP_FILE}
	@make -C ${APP_PATH}/${GIT_TOOLS} storage-pull\
	FILE=${ES_BACKUP_FILE} DATA_DIR=${BACKUP_DIR}\
	STORAGE_BUCKET=${STORAGE_BUCKET} STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY}
# make sure the backup directory exists; mkdir -p is idempotent, so the
# explicit existence test the original carried is unnecessary
backup-dir:
	@mkdir -p "$(BACKUP_DIR)"
# raise vm.max_map_count to the elasticsearch minimum when /etc/sysctl.conf
# does not already pin it (vm_max_count is computed at parse time)
# NOTE(review): sysctl -w does not persist the setting, so this re-runs after
# every reboot unless sysctl.conf is edited separately
vm_max:
ifeq ("$(vm_max_count)", "")
	@echo updating vm.max_map_count $(vm_max_count) to 262144
	sudo sysctl -w vm.max_map_count=262144
endif
elasticsearch-dev: elasticsearch
# build a multi-node compose file by appending one node section per extra node
# (templated via sed from the -node.yml file), create each node's data dir
# (uid 1000 = container user), start the cluster and wait up to TIMEOUT s for
# the indices API
elasticsearch-cluster: network vm_max
	@echo docker-compose up matchID elasticsearch with ${ES_NODES} nodes
	@cat ${DC_FILE}-elasticsearch.yml > ${DC_FILE}-elasticsearch-huge.yml
	@(if [ ! -d ${ES_DATA}/node1 ]; then sudo mkdir -p ${ES_DATA}/node1 ; sudo chmod g+rw ${ES_DATA}/node1/.; sudo chown 1000:1000 ${ES_DATA}/node1/.; fi)
	@(i=$(ES_NODES); while [ $${i} -gt 1 ]; \
	do \
	if [ ! -d ${ES_DATA}/node$$i ]; then (echo ${ES_DATA}/node$$i && sudo mkdir -p ${ES_DATA}/node$$i && sudo chmod g+rw ${ES_DATA}/node$$i/. && sudo chown 1000:1000 ${ES_DATA}/node$$i/.); fi; \
	cat ${DC_FILE}-elasticsearch-node.yml | sed "s/%N/$$i/g;s/%MM/${ES_MEM}/g;s/%M/${ES_MEM}/g" >> ${DC_FILE}-elasticsearch-huge.yml; \
	i=`expr $$i - 1`; \
	done;\
	true)
	${DC} -f ${DC_FILE}-elasticsearch-huge.yml up -d
	@timeout=${TIMEOUT} ; ret=1 ; until [ "$$timeout" -le 0 -o "$$ret" -eq "0" ] ; do (docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch curl -s --fail -XGET localhost:9200/_cat/indices > /dev/null) ; ret=$$? ; if [ "$$ret" -ne "0" ] ; then echo -en "\rwaiting for elasticsearch to start $$timeout" ; fi ; ((timeout--)); sleep 1 ; done ; echo ; exit $$ret
# start a single-node elasticsearch; the data dir is owned by uid 1000 (the
# user the official image runs as)
elasticsearch-start: network vm_max
	@echo docker-compose up matchID elasticsearch with ${ES_NODES} nodes
	@(if [ ! -d ${ES_DATA}/node1 ]; then sudo mkdir -p ${ES_DATA}/node1 ; sudo chmod g+rw ${ES_DATA}/node1/.; sudo chown 1000:1000 ${ES_DATA}/node1/.; fi)
	${DC} -f ${DC_FILE}-elasticsearch.yml up -d
# start elasticsearch and block until its HTTP API answers (up to TIMEOUT s)
elasticsearch: elasticsearch-start
	@timeout=${TIMEOUT} ; ret=1 ; until [ "$$timeout" -le 0 -o "$$ret" -eq "0" ] ; do (docker exec -i ${USE_TTY} ${DC_PREFIX}-elasticsearch curl -s --fail -XGET localhost:9200/ > /dev/null) ; ret=$$? ; if [ "$$ret" -ne "0" ] ; then echo -en "\rwaiting for elasticsearch API to start $$timeout" ; fi ; ((timeout--)); sleep 1 ; done ; echo ; exit $$ret
# build and start the remote half of a swarm cluster: node numbers run from
# ES_NODES*ES_SWARM_NODE_NUMBER down to ES_NODES*(ES_SWARM_NODE_NUMBER-1)+1,
# with the local-only depends_on lines stripped from the node template
elasticsearch2:
	@echo docker-compose up matchID elasticsearch with ${ES_NODES} nodes
	@cat ${DC_FILE}-elasticsearch.yml | head -8 > ${DC_FILE}-elasticsearch-huge-remote.yml
	@(i=$$(( $(ES_NODES) * $(ES_SWARM_NODE_NUMBER) ));j=$$(( $(ES_NODES) * $(ES_SWARM_NODE_NUMBER) - $(ES_NODES))); while [ $${i} -gt $${j} ]; \
	do \
	if [ ! -d ${ES_DATA}/node$$i ]; then (echo ${ES_DATA}/node$$i && sudo mkdir -p ${ES_DATA}/node$$i && sudo chmod g+rw ${ES_DATA}/node$$i/. && sudo chown 1000:1000 ${ES_DATA}/node$$i/.); fi; \
	cat ${DC_FILE}-elasticsearch-node.yml | sed "s/%N/$$i/g;s/%MM/${ES_MEM}/g;s/%M/${ES_MEM}/g" | egrep -v 'depends_on|- elasticsearch' >> ${DC_FILE}-elasticsearch-huge-remote.yml; \
	i=`expr $$i - 1`; \
	done;\
	true)
	${DC} -f ${DC_FILE}-elasticsearch-huge-remote.yml up -d
kibana-dev-stop: kibana-stop
kibana-dev: kibana
kibana-stop:
	${DC} -f ${DC_FILE}-kibana.yml down
# start kibana; create its data dir (uid 1000 = container user) on first run
kibana: network
ifeq ("$(wildcard ${BACKEND}/kibana)","")
	sudo mkdir -p ${BACKEND}/kibana && sudo chmod g+rw ${BACKEND}/kibana/. && sudo chown 1000:1000 ${BACKEND}/kibana/.
endif
	${DC} -f ${DC_FILE}-kibana.yml up -d
# ensure the postgres image is available locally: inspect first, pull as
# fallback, fail loudly otherwise; ".postgres:latest" stamps a passed check
postgres-docker-check:
	@if [ ! -f ".postgres:latest" ]; then\
	(\
	(docker image inspect postgres:latest > /dev/null 2>&1)\
	&& touch .postgres:latest\
	)\
	||\
	(\
	(docker pull postgres:latest 2> /dev/null)\
	&& touch .postgres:latest\
	)\
	|| (echo no image found for postgres:latest && exit 1);\
	fi;
postgres-dev-stop: postgres-stop
postgres-stop:
	${DC} -f ${DC_FILE}-${PG}.yml down
postgres-dev: postgres
# start postgres and install the fuzzystrmatch extension
# NOTE(review): the fixed "sleep 5" is a startup race — polling pg_isready
# would be more robust
postgres: network
	${DC} -f ${DC_FILE}-${PG}.yml up -d
	@sleep 5 && docker exec ${DC_PREFIX}-postgres psql -U postgres -c "CREATE EXTENSION IF NOT EXISTS fuzzystrmatch"
backend-stop:
	${DC} down
# create the runtime directories the backend mounts (upload, projects, models)
# Fix: the MODELS branch created ${PROJECTS} instead of ${MODELS} (copy-paste
# bug), so the models directory was never created
backend-prep:
ifeq ("$(wildcard ${UPLOAD})","")
	@sudo mkdir -p ${UPLOAD}
endif
ifeq ("$(wildcard ${PROJECTS})","")
	@sudo mkdir -p ${PROJECTS}
endif
ifeq ("$(wildcard ${MODELS})","")
	@sudo mkdir -p ${MODELS}
endif
# run the backend in development mode; rebuilds only when the content checksum
# (commit, derived from APP_VERSION) differs from the recorded .lastcommit
backend-dev: network backend-prep
	@echo WARNING new ADMIN_PASSWORD is ${ADMIN_PASSWORD}
	@if [ -f docker-compose-local.yml ];then\
	DC_LOCAL="-f docker-compose-local.yml";\
	fi;\
	export BACKEND_ENV=development;\
	export DC_POSTFIX="-dev";\
	if [ "${commit}" != "${lastcommit}" ];then\
	echo building matchID backend after new commit;\
	${DC} build;\
	echo "${commit}" > ${BACKEND}/.lastcommit;\
	fi;\
	${DC} -f docker-compose.yml -f docker-compose-dev.yml $$DC_LOCAL up -d
backend-dev-stop:
	${DC} -f docker-compose.yml down
# rebuild when the source checksum changed, then validate the production
# compose configuration (`config -q` exits non-zero on an invalid file)
backend-check-build:
	@if [ -f docker-compose-local.yml ];then\
	DC_LOCAL="-f docker-compose-local.yml";\
	fi;\
	export BACKEND_ENV=production;\
	if [ "${commit}" != "${lastcommit}" ];then\
	echo building ${APP_GROUP} ${APP} for dev after new commit;\
	${DC} build $$DC_LOCAL;\
	echo "${commit}" > ${BACKEND}/.lastcommit;\
	fi;\
	${DC} -f docker-compose.yml $$DC_LOCAL config -q
# try pulling the versioned image from Docker Hub; on success record the
# commit so no local build is triggered, otherwise fall back to local build
backend-docker-pull:
	@(\
	(docker pull ${DOCKER_USERNAME}/${DC_PREFIX}-${APP}:${APP_VERSION} > /dev/null 2>&1)\
	&& echo docker successfully pulled && (echo "${commit}" > ${BACKEND}/.lastcommit) \
	) || echo "${DOCKER_USERNAME}/${DC_PREFIX}-${APP}:${APP_VERSION} not found on Docker Hub build, using local"
# production build: rebuild (with --pull --no-cache) only when the checksum
# changed, then tag the versioned image as :latest
backend-build: backend-prep backend-check-build backend-docker-pull
	@if [ -f docker-compose-local.yml ];then\
	DC_LOCAL="-f docker-compose-local.yml";\
	fi;\
	export BACKEND_ENV=production;\
	if [ "${commit}" != "${lastcommit}" ];then\
	echo building ${APP_GROUP} ${APP} after new commit;\
	${DC} build ${DC_BUILD_ARGS};\
	echo "${commit}" > ${BACKEND}/.lastcommit;\
	fi;
	@docker tag ${DOCKER_USERNAME}/${DC_PREFIX}-${APP}:${APP_VERSION} ${DOCKER_USERNAME}/${DC_PREFIX}-${APP}:latest
# start the backend in production mode and wait up to TIMEOUT s for its API
backend: network backend-docker-check
	@echo WARNING new ADMIN_PASSWORD is ${ADMIN_PASSWORD}
	@if [ -f docker-compose-local.yml ];then\
	DC_LOCAL="-f docker-compose-local.yml";\
	fi;\
	export BACKEND_ENV=production;\
	${DC} -f docker-compose.yml $$DC_LOCAL up -d
	@timeout=${TIMEOUT} ; ret=1 ; until [ "$$timeout" -le 0 -o "$$ret" -eq "0" ] ; do (docker exec -i ${USE_TTY} ${DC_PREFIX}-backend curl -s --noproxy "*" --fail -XGET localhost:${BACKEND_PORT}/matchID/api/v0/ > /dev/null) ; ret=$$? ; echo;if [ "$$ret" -ne "0" ] ; then echo -en "\rwaiting for backend to start $$timeout" ; fi ; ((timeout--)); sleep 1 ; done ; echo ; exit $$ret
# delegate image availability check to the tools repo
backend-docker-check: config
	@make -C ${APP_PATH}/${GIT_TOOLS} docker-check DC_IMAGE_NAME=${DC_IMAGE_NAME} APP_VERSION=${APP_VERSION} GIT_BRANCH="${GIT_BRANCH}" ${MAKEOVERRIDES}
# delegate image publication to the tools repo
backend-docker-push:
	@make -C ${APP_PATH}/${GIT_TOOLS} docker-push DC_IMAGE_NAME=${DC_IMAGE_NAME} APP_VERSION=${APP_VERSION} ${MAKEOVERRIDES}
# pull the latest backend sources on the current branch
backend-update:
	@cd ${BACKEND}; git pull ${GIT_ORIGIN} "${GIT_BRANCH}"
update: frontend-update backend-update
# start/stop every service declared in SERVICES; a failing service is reported
# but does not abort the loop.
# Fix: recursive invocations now use $(MAKE) instead of a literal `make`, so
# jobserver/-n flags and command-line overrides propagate correctly
services-dev:
	@for service in ${SERVICES}; do\
	($(MAKE) $$service-dev ${MAKEOVERRIDES} || echo starting $$service failed);\
	done
services-dev-stop:
	@for service in ${SERVICES}; do\
	($(MAKE) $$service-dev-stop ${MAKEOVERRIDES} || echo stopping $$service failed);\
	done
services:
	@for service in ${SERVICES}; do\
	($(MAKE) $$service ${MAKEOVERRIDES} || echo starting $$service failed);\
	done
services-stop:
	@for service in ${SERVICES}; do\
	($(MAKE) $$service-stop ${MAKEOVERRIDES} || echo stopping $$service failed);\
	done
dev: network services-dev
dev-stop: services-dev-stop network-stop
# clone the frontend beside the backend on first use (checked out on the
# branch matching the backend's) and link the shared tools checkout into it
frontend-config:
ifeq ("$(wildcard ${FRONTEND})","")
	@echo downloading frontend code
	@git clone -q ${GIT_ROOT}/${GIT_FRONTEND} ${FRONTEND} #2> /dev/null; true
	@cd ${FRONTEND};git checkout "${GIT_FRONTEND_BRANCH}"
endif
ifeq ("$(wildcard ${FRONTEND}/${GIT_TOOLS})","")
	@ln -s ${APP_PATH}/${GIT_TOOLS} ${FRONTEND}/${GIT_TOOLS}
endif
# the targets below all delegate to the frontend repo's own Makefile, passing
# the branch the frontend should track; -stop/-clean variants are guarded on
# the checkout existing so they are safe to call before frontend-config
frontend-docker-check: frontend-config
	@make -C ${FRONTEND} frontend-docker-check GIT_BRANCH="${GIT_FRONTEND_BRANCH}"
frontend-clean:
	@if [ -d "${FRONTEND}" ];then\
	make -C ${FRONTEND} frontend-clean GIT_BRANCH="${GIT_FRONTEND_BRANCH}";\
	fi;
frontend-update:
	@cd ${FRONTEND}; git pull ${GIT_ORIGIN} "${GIT_FRONTEND_BRANCH}"
frontend-dev: frontend-config
	@make -C ${FRONTEND} frontend-dev GIT_BRANCH="${GIT_FRONTEND_BRANCH}"
frontend-dev-stop:
	@if [ -d "${FRONTEND}" ];then\
	make -C ${FRONTEND} frontend-dev-stop GIT_BRANCH="${GIT_FRONTEND_BRANCH}";\
	fi
frontend-build: network frontend-config
	@make -C ${FRONTEND} frontend-build GIT_BRANCH="${GIT_FRONTEND_BRANCH}"
frontend-stop:
	@if [ -d "${FRONTEND}" ];then\
	make -C ${FRONTEND} frontend-stop GIT_BRANCH="${GIT_FRONTEND_BRANCH}";\
	fi
frontend: frontend-docker-check
	@make -C ${FRONTEND} frontend GIT_BRANCH="${GIT_FRONTEND_BRANCH}"
# stop every service then tear the docker network down
stop: services-stop network-stop
	@echo all components stopped
# bring the network and all services up, then show the compose logs.
# Fix: use ${DC} instead of a literal `docker-compose` so the compose binary
# stays configurable in one place, as everywhere else in this file
start: network services
	@sleep 2 && ${DC} logs
up: start
down: stop
restart: down up
# save every docker image (backend, elasticsearch, postgres, frontend) as a
# gzipped tarball under DC_DIR, skipping images already saved; the frontend
# version is read from the frontend checkout at run time
docker-save-all: config backend-docker-check frontend-docker-check postgres-docker-check elasticsearch-docker-check
	@if [ ! -f "${DC_DIR}/${DC_PREFIX}-${APP}:${APP_VERSION}.tar.gz" ];then\
	echo saving backend docker image;\
	docker save ${DOCKER_USERNAME}/${DC_PREFIX}-${APP}:${APP_VERSION} | gzip > ${DC_DIR}/${DC_PREFIX}-${APP}:${APP_VERSION}.tar.gz;\
	fi
	@if [ ! -f "${DC_DIR}/elasticsearch:${ES_VERSION}.tar.gz" ];then\
	echo saving elasticsearch docker image;\
	docker save docker.elastic.co/elasticsearch/elasticsearch:${ES_VERSION} | gzip > ${DC_DIR}/elasticsearch:${ES_VERSION}.tar.gz;\
	fi
	@if [ ! -f "${DC_DIR}/postgres:latest.tar.gz" ];then\
	echo saving postgres docker image;\
	docker save postgres:latest | gzip > ${DC_DIR}/postgres:latest.tar.gz;\
	fi
	@FRONTEND_APP_VERSION=$$(cd ${FRONTEND} && make -s version | awk '{print $$NF}');\
	if [ ! -f "${DC_DIR}/${FRONTEND_DC_IMAGE_NAME}:$$FRONTEND_APP_VERSION.tar.gz" ];then\
	echo saving frontend docker image;\
	docker save ${DOCKER_USERNAME}/${FRONTEND_DC_IMAGE_NAME}:$$FRONTEND_APP_VERSION | gzip > ${DC_DIR}/${FRONTEND_DC_IMAGE_NAME}:$$FRONTEND_APP_VERSION.tar.gz;\
	fi
# build a self-contained distribution tarball: rclone + docker-compose
# binaries, the git-tracked backend/tools/frontend sources (with their .git
# dirs), and all saved docker images; paths inside the archive are made
# relative to the parent of APP_PATH
# NOTE(review): `echo $$DD` looks like leftover debug output ($DD is never set)
package: docker-save-all
	@FRONTEND_APP_VERSION=$$(cd ${FRONTEND} && make -s version | awk '{print $$NF}');\
	PACKAGE=${APP_GROUP}-${APP_VERSION}-$$FRONTEND_APP_VERSION.tar.gz;\
	if [ ! -f "$$PACKAGE" ];then\
	curl -s -O https://downloads.rclone.org/rclone-current-linux-amd64.rpm;\
	curl -s -O https://downloads.rclone.org/rclone-current-linux-amd64.deb;\
	curl -s -L "https://github.com/docker/compose/releases/download/1.27.4/docker-compose-$$(uname -s)-$$(uname -m)" -o docker-compose;\
	cd ${APP_PATH}/..;\
	DC_DIR=`echo ${DC_DIR} | sed "s|${APP_PATH}|$${APP_PATH##*/}|"`;\
	echo $$DD;\
	tar cvzf $${APP_PATH##*/}/$$PACKAGE \
	$${APP_PATH##*/}/rclone-current-linux*\
	$${APP_PATH##*/}/docker-compose\
	`cd ${APP_PATH};git ls-files | sed "s/^/$${APP_PATH##*/}\//"` \
	$${APP_PATH##*/}/.git\
	$$DC_DIR/postgres:latest.tar.gz\
	$$DC_DIR/${FRONTEND_DC_IMAGE_NAME}:$$FRONTEND_APP_VERSION.tar.gz\
	$$DC_DIR/${DC_PREFIX}-${APP}:${APP_VERSION}.tar.gz\
	$$DC_DIR/elasticsearch:${ES_VERSION}.tar.gz\
	`cd ${APP_PATH}/${GIT_TOOLS};git ls-files | sed "s/^/$${APP_PATH##*/}\/${GIT_TOOLS}\//"`\
	$${APP_PATH##*/}/${GIT_TOOLS}/.git\
	`cd ${FRONTEND};git ls-files | sed "s/^/$${FRONTEND##*/}\//"`\
	$${FRONTEND##*/}/.git;\
	fi
# push the distribution tarball to the public matchid-dist bucket; on master,
# also publish a "-latest" alias via a symlink pushed with --copy-links.
# Fix: `ln -s` failed on every run after the first because the -latest
# symlink already existed — use `ln -sf` to replace it atomically
package-publish: package
	@FRONTEND_APP_VERSION=$$(cd ${FRONTEND} && make -s version | awk '{print $$NF}');\
	PACKAGE=${APP_GROUP}-${APP_VERSION}-$$FRONTEND_APP_VERSION.tar.gz;\
	make -C ${APP_PATH}/${GIT_TOOLS} storage-push\
	FILE=${APP_PATH}/$$PACKAGE\
	STORAGE_OPTIONS="--s3-acl=public-read"\
	STORAGE_BUCKET=matchid-dist STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY};\
	if [ "${GIT_BRANCH}" = "${GIT_BRANCH_MASTER}" ]; then\
	ln -sf $$PACKAGE ${APP_PATH}/${APP_GROUP}-latest.tar.gz;\
	make -C ${APP_PATH}/${GIT_TOOLS} storage-push\
	FILE=${APP_PATH}/${APP_GROUP}-latest.tar.gz\
	STORAGE_OPTIONS="--copy-links --s3-acl=public-read"\
	STORAGE_BUCKET=matchid-dist STORAGE_ACCESS_KEY=${STORAGE_ACCESS_KEY} STORAGE_SECRET_KEY=${STORAGE_SECRET_KEY};\
	fi
# unpack a distribution on a fresh host: install rclone from the bundled
# package, install docker-compose into ~/.local/bin when absent, run config,
# then load every saved docker image
depackage:
	@if [ ! -f "/usr/bin/rclone" ]; then\
	if [ "${OS_TYPE}" = "DEB" ]; then\
	sudo dpkg -i rclone-current-linux-amd64.deb;\
	fi;\
	if [ "${OS_TYPE}" = "RPM" ]; then\
	sudo yum localinstall -y rclone-current-linux-amd64.rpm;\
	fi;\
	fi
	@if [ -z "$(wildcard /usr/bin/docker-compose /usr/local/bin/docker-compose)" ];then\
	mkdir -p ${HOME}/.local/bin && cp docker-compose ${HOME}/.local/bin/docker-compose;\
	chmod +x ${HOME}/.local/bin/docker-compose;\
	fi;
	@make config
	@ls ${DC_DIR}/*.tar.gz | xargs -L 1 sudo -u $$USER docker load -i;
	@echo you can now start all service using 'make up';
# tail backend container logs (starts the backend first)
logs: backend
	@docker logs ${DC_PREFIX}-${APP}
# fetch the examples repo and point projects/upload at it; any pre-existing
# dirs are set aside under a dated, random-suffixed name instead of deleted
example-download:
	@echo downloading example code
	@mkdir -p ${EXAMPLES}
	@cd ${EXAMPLES}; git clone -q https://github.com/matchID-project/examples . ; true
	@mv projects _${date}_${id}_projects 2> /dev/null; true
	@mv upload _${date}_${id}_upload 2> /dev/null; true
	@ln -s ${EXAMPLES}/projects ${BACKEND}/projects
	@ln -s ${EXAMPLES}/data ${BACKEND}/upload
# trigger the recipe named by RECIPE through the backend HTTP API
recipe-run: backend
	docker exec -i ${USE_TTY} ${DC_PREFIX}-backend curl -s -XPUT http://localhost:${PORT}/matchID/api/v0/recipes/${RECIPE}/run && echo ${RECIPE} run
# full local deployment, then smoke-test the API through the tools repo
deploy-local: config up local-test-api
local-test-api:
	@make -C ${APP_PATH}/${GIT_TOOLS} local-test-api \
	PORT=${PORT} \
	API_TEST_PATH=${API_TEST_PATH} API_TEST_JSON_PATH=${API_TEST_JSON_PATH} API_TEST_DATA=''\
	${MAKEOVERRIDES}
# provision a cloud instance tagged with both frontend and backend versions
deploy-remote-instance: config frontend-config
	@FRONTEND_APP_VERSION=$$(cd ${FRONTEND} && make -s version | awk '{print $$NF}');\
	make -C ${APP_PATH}/${GIT_TOOLS} remote-config\
	APP=${APP} APP_VERSION=${APP_VERSION} CLOUD_TAG=front:$$FRONTEND_APP_VERSION-back:${APP_VERSION}\
	DC_IMAGE_NAME=${DC_IMAGE_NAME}\
	GIT_BRANCH="${GIT_BRANCH}" ${MAKEOVERRIDES}
# deploy backend services then the frontend on the remote instance
deploy-remote-services:
	@make -C ${APP_PATH}/${GIT_TOOLS} remote-deploy remote-actions\
	APP=${APP} APP_VERSION=${APP_VERSION}\
	ACTIONS="config up" SERVICES="elasticsearch postgres backend" GIT_BRANCH="${GIT_BRANCH}" ${MAKEOVERRIDES}
	@FRONTEND_APP_VERSION=$$(cd ${FRONTEND} && make -s version | awk '{print $$NF}');\
	make -C ${APP_PATH}/${GIT_TOOLS} remote-deploy remote-actions\
	APP=${GIT_FRONTEND} APP_VERSION=$$FRONTEND_APP_VERSION DC_IMAGE_NAME=${FRONTEND_DC_IMAGE_NAME}\
	ACTIONS="${GIT_FRONTEND}" GIT_BRANCH="${GIT_FRONTEND_BRANCH}" ${MAKEOVERRIDES}
# wire the instance behind nginx and test the public API; requires NGINX_HOST
# and NGINX_USER (expected from the artifacts include)
deploy-remote-publish:
	@if [ -z "${NGINX_HOST}" -o -z "${NGINX_USER}" ];then\
	(echo "can't deploy without NGINX_HOST and NGINX_USER" && exit 1);\
	fi;
	make -C ${APP_PATH}/${GIT_TOOLS} remote-test-api-in-vpc nginx-conf-apply remote-test-api\
	APP=${APP} APP_VERSION=${APP_VERSION} GIT_BRANCH="${GIT_BRANCH}" PORT=${PORT}\
	APP_DNS=${APP_DNS} API_TEST_PATH=${API_TEST_PATH} API_TEST_JSON_PATH=${API_TEST_JSON_PATH} API_TEST_DATA=''\
	${MAKEOVERRIDES}
# retire instances running a version other than the one just deployed
deploy-delete-old:
	@make -C ${APP_PATH}/${GIT_TOOLS} cloud-instance-down-invalid\
	APP=${APP} APP_VERSION=${APP_VERSION} GIT_BRANCH="${GIT_BRANCH}" ${MAKEOVERRIDES}
# install the monitoring agent on the remote instance
deploy-monitor:
	@make -C ${APP_PATH}/${GIT_TOOLS} remote-install-monitor-nq NQ_TOKEN=${NQ_TOKEN} ${MAKEOVERRIDES}
# end-to-end remote deployment pipeline
deploy-remote: config deploy-remote-instance deploy-remote-services deploy-remote-publish deploy-delete-old deploy-monitor
# best-effort remote cleanup (errors silenced)
clean-remote:
	@make -C ${APP_PATH}/${GIT_TOOLS} remote-clean ${MAKEOVERRIDES} > /dev/null 2>&1 || true