# SCHEDULE_RUN_TIME default should be set to 00:00
# need to mount backup dir to POSTGRES_DUMP_PATH

# --- Stage 1 (transform): build the Scala map-pipeline assembly jar with sbt ---
FROM ubuntu:20.04 AS transform

# JDK 8 (sbt/Scala toolchain) plus download helpers, installed in a single
# layer with the apt lists removed in the same layer that created them.
RUN apt-get update \
    && apt-get install -y \
        curl \
        gnupg \
        openjdk-8-jdk \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Register the official sbt apt repository and import its signing key.
# The trailing `-` makes apt-key read the key from stdin explicitly.
RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list
RUN curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | apt-key add -
# RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
RUN apt-get update \
    && apt-get install -y sbt \
    && rm -rf /var/lib/apt/lists/*

# Build the fat jar; the final stage copies it out of /map-pipeline/target.
COPY ["map-pipeline", "map-pipeline"]
WORKDIR /map-pipeline
RUN sbt assembly

# --- Stage 2 (final): Haskell schema tool, Python app, Spark runtime ---
FROM ubuntu:20.04

RUN apt-get update \
    && apt-get install -y wget curl \
    && rm -rf /var/lib/apt/lists/*

# Install the Haskell `stack` build tool.
# NOTE(review): unpinned `curl | sh` installer — consider pinning a stack
# release and verifying its checksum for reproducible builds.
RUN curl -sSL https://get.haskellstack.org/ | sh

# Build the schema project (stack fetches a GHC bindist on first build).
COPY ["map-pipeline-schema", "map-pipeline-schema"]
WORKDIR /map-pipeline-schema
RUN stack build

WORKDIR /
# Data directory; a backup dir is expected to be mounted here at runtime
# (see POSTGRES_DUMP_PATH note at the top of the file).
RUN mkdir data

# Prerequisites for registering the PostgreSQL apt repository.
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y git gnupg tzdata wget \
    && rm -rf /var/lib/apt/lists/*

# PostgreSQL Global Development Group repo (focal) providing postgresql-client-11.
RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ focal-pgdg main" | tee -a /etc/apt/sources.list.d/pgdg.list
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -

# Toolchain and runtime packages in a single layer: one `apt-get update`
# instead of five, pkg-config/libicu-dev no longer installed against a
# possibly-stale package list, and the lists are removed in the same layer.
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        libicu-dev \
        libmemcached-dev \
        openjdk-8-jdk \
        pkg-config \
        postgresql-client-11 \
        python3-pip \
        wget \
    && rm -rf /var/lib/apt/lists/*

# PyICU must be compiled from source against the system libicu (no wheel).
RUN pip3 install --no-cache-dir --no-binary=:pyicu: pyicu
RUN pip3 install --no-cache-dir csvkit
RUN pip3 install --no-cache-dir schedule pandas psycopg2-binary requests flask flask-cors redis rq oslash==0.5.1
# sherlock is pinned to an exact commit for reproducibility.
RUN pip3 install --no-cache-dir git+https://github.com/vaidik/sherlock.git@77742ba91a24f75ee62e1895809901bde018654f

# Download, unpack, and delete the Spark tarball in one layer so the archive
# (~230 MB) does not persist in the final image.
RUN wget https://archive.apache.org/dist/spark/spark-2.4.8/spark-2.4.8-bin-hadoop2.7.tgz \
    && tar zxf spark-2.4.8-bin-hadoop2.7.tgz \
    && rm spark-2.4.8-bin-hadoop2.7.tgz
# ######################### IGNORE BELOW ##################################################
# RUN wget https://apache.claz.org/spark/spark-2.4.8/spark-2.4.8-bin-hadoop2.7.tgz
# https://apache.claz.org/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz
# && echo "0F5455672045F6110B030CE343C049855B7BA86C0ECB5E39A075FF9D093C7F648DA55DED12E72FFE65D84C32DCD5418A6D764F2D6295A3F894A4286CC80EF478 spark-2.4.7-bin-hadoop2.7.tgz" | sha512sum -c -
# RUN wget http://apache.spinellicreations.com/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz && echo "0F5455672045F6110B030CE343C049855B7BA86C0ECB5E39A075FF9D093C7F648DA55DED12E72FFE65D84C32DCD5418A6D764F2D6295A3F894A4286CC80EF478 spark-2.4.7-bin-hadoop2.7.tgz" | sha512sum -c -
# ######################### IGNORE ABOVE ##################################################
ENV PATH="/spark-2.4.8-bin-hadoop2.7/bin:${PATH}"
# set to 1 to reload data from redcap database
# NOTE: several lines are elided from this excerpt (source diff hunk "@@ -73,14 +63,10 @@");
# the elided region includes at least `ENV INSERT_DATA=0` per the hunk context.
# Runtime configuration (overridable with `docker run -e`).
ENV SERVER=0
# set time zone
ENV TZ=America/New_York

# Assembly jar produced by the `transform` stage.
COPY --from=transform ["map-pipeline/target/scala-2.11/TIC preprocessing-assembly-0.2.0.jar", "TIC preprocessing-assembly.jar"]

# Application sources.
COPY ["reload.py", "reload.py"]
COPY ["server.py", "server.py"]
COPY ["application.py", "application.py"]
COPY ["utils.py", "utils.py"]
# COPY ["test_data.json", "test_data.json"]

# Exec-form entrypoint: python3 runs as PID 1 and receives SIGTERM directly.
# NOTE(review): image runs as root — consider adding a non-root USER once the
# app's port/filesystem requirements are confirmed.
ENTRYPOINT ["python3", "application.py"]