Commit 4c97538

Spring cleaning - forgot to remove the imports in python
1 parent 66c1b76 commit 4c97538

3 files changed: +10 -25 lines

Dockerfile

Lines changed: 9 additions & 23 deletions
@@ -11,53 +11,43 @@
 # SCHEDULE_RUN_TIME default should be set to 00:00
 # need to mount backup dir to POSTGRES_DUMP_PATH
 FROM ubuntu:20.04 AS transform
-
 RUN apt-get update && apt-get install -y wget openjdk-8-jdk gnupg
 RUN apt-get update && apt-get install curl -y
-
 RUN echo "deb https://repo.scala-sbt.org/scalasbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list
 RUN curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | apt-key add
 # RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
 RUN apt-get update && apt-get install -y sbt
 COPY ["map-pipeline", "map-pipeline"]
 WORKDIR map-pipeline
 RUN sbt assembly
-
 FROM ubuntu:20.04
-
 RUN apt-get update && apt-get install -y wget curl
-
 RUN curl -sSL https://get.haskellstack.org/ | sh
 COPY ["map-pipeline-schema", "map-pipeline-schema"]
 WORKDIR map-pipeline-schema
 RUN stack build
-
 WORKDIR /
 RUN mkdir data
-
 RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y wget gnupg git tzdata
-
-RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ bionic-pgdg main" | tee -a /etc/apt/sources.list.d/pgdg.list
-
+RUN echo "deb http://apt.postgresql.org/pub/repos/apt/ focal-pgdg main" | tee -a /etc/apt/sources.list.d/pgdg.list
 RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
-
-RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-pip wget openjdk-8-jdk postgresql-client-11 libmemcached-dev
-
-
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-pip
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y wget
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-8-jdk
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y postgresql-client-11
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y libmemcached-dev
 RUN apt-get install pkg-config libicu-dev -y
 RUN pip3 install --no-binary=:pyicu: pyicu
 RUN pip3 install csvkit
-
 RUN pip3 install schedule pandas psycopg2-binary requests flask flask-cors redis rq oslash==0.5.1
 RUN pip3 install git+https://github.com/vaidik/sherlock.git@77742ba91a24f75ee62e1895809901bde018654f
-
 RUN wget https://archive.apache.org/dist/spark/spark-2.4.8/spark-2.4.8-bin-hadoop2.7.tgz
+########################## IGNORE BELOW ##################################################
 # RUN wget https://apache.claz.org/spark/spark-2.4.8/spark-2.4.8-bin-hadoop2.7.tgz
 # https://apache.claz.org/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz
 #&& echo "0F5455672045F6110B030CE343C049855B7BA86C0ECB5E39A075FF9D093C7F648DA55DED12E72FFE65D84C32DCD5418A6D764F2D6295A3F894A4286CC80EF478 spark-2.4.7-bin-hadoop2.7.tgz" | sha512sum -c -
-
 # RUN wget http://apache.spinellicreations.com/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz && echo "0F5455672045F6110B030CE343C049855B7BA86C0ECB5E39A075FF9D093C7F648DA55DED12E72FFE65D84C32DCD5418A6D764F2D6295A3F894A4286CC80EF478 spark-2.4.7-bin-hadoop2.7.tgz" | sha512sum -c -
-
+########################## IGNORE ABOVE ##################################################
 RUN tar zxvf spark-2.4.8-bin-hadoop2.7.tgz
 ENV PATH="/spark-2.4.8-bin-hadoop2.7/bin:${PATH}"
 # set to 1 to reload data from redcap database

@@ -73,14 +63,10 @@ ENV INSERT_DATA=0
 ENV SERVER=0
 # set time zone
 ENV TZ=America/New_York
-
 COPY --from=transform ["map-pipeline/target/scala-2.11/TIC preprocessing-assembly-0.2.0.jar", "TIC preprocessing-assembly.jar"]
-
 COPY ["reload.py", "reload.py"]
 COPY ["server.py", "server.py"]
 COPY ["application.py", "application.py"]
 COPY ["utils.py", "utils.py"]
 # COPY ["test_data.json", "test_data.json"]
-
-ENTRYPOINT ["python3", "application.py"]
-
+ENTRYPOINT ["python3", "application.py"]
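
The substantive change above is the switch of the PGDG apt repository from bionic-pgdg to focal-pgdg, matching the ubuntu:20.04 (focal) base image, together with splitting the single combined apt-get install line into one RUN step per package; the remaining deletions only drop blank lines. For comparison, a minimal sketch of an equivalent single-layer form of those five steps (the package list is taken from the diff; the apt cache cleanup at the end is an added assumption, not part of this commit):

# Hypothetical consolidated alternative, not part of this commit; the final
# cleanup line is an assumption added to keep the layer small.
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
        python3-pip \
        wget \
        openjdk-8-jdk \
        postgresql-client-11 \
        libmemcached-dev \
    && rm -rf /var/lib/apt/lists/*

Keeping one package per RUN step, as the commit does, trades extra image layers for per-package build caching and easier debugging when a single install fails.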

reload.py

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@
 from psycopg2 import connect
 from rq import Connection, Queue, Worker
 from sherlock import Lock
-from tx.functional.either import Left, Right
+
 
 
 #most worker functions are found here in reload

server.py

Lines changed: 0 additions & 1 deletion
@@ -21,7 +21,6 @@
 import tempfile
 import logging
 import csv
-from tx.functional.either import Left, Right
 import reload
 import utils
 
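
Both Python changes remove the same leftover import, from tx.functional.either import Left, Right, which is what the commit message refers to. As a minimal sketch of how one might confirm that imported names are no longer referenced before deleting such a line (a hypothetical standalone check, not part of this repository):

# Hypothetical check, not part of this repo: list imported names that are never
# referenced in a module's body, using only the Python standard library.
import ast
import pathlib

def unreferenced(path, names):
    tree = ast.parse(pathlib.Path(path).read_text())
    # ast.Name nodes cover loads and stores of a bare name; the
    # "from ... import" statement itself produces alias nodes, so the
    # import line alone does not count as a use.
    used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
    return [name for name in names if name not in used]

for module in ("reload.py", "server.py"):
    print(module, unreferenced(module, ["Left", "Right"]))

If both names come back unreferenced for both files, dropping the import is safe, which is what this commit does.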