I have a development environment where I run npm run serve in one local terminal and docker-compose up -d in a second terminal to start the services my system needs.
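Concretely, the workflow is nothing unusual, just two terminals on my machine:

# terminal 1: the front-end dev server, running directly on my machine
npm run serve

# terminal 2: the backing services (Django, Postgres, Redis, celery) in Docker
docker-compose up -d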
I am now attempting to run front-end tests, which I run with Nightwatch.js inside one of the running containers, and for some reason the test runner is not seeing the pages served by npm run serve. Quite literally, when I capture a screenshot from the test runner, the page looks as if I had stopped npm run serve; however, when I go to 127.0.0.1 in my browser, everything loads as usual.
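For example, from my own machine a request like this responds as expected (8080 is just a placeholder for whatever port npm run serve actually binds to in my setup):

# on the host: the dev server answers
curl -I http://127.0.0.1:8080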
I think my issue is that the test is being run inside a Docker container, like so:

docker-compose exec web bash -c "npx nightwatch ...file"

and that particular container is not running npm run serve. But I am confused as to why the page works when I hit it from my own browser. I have tried exposing ports in the Dockerfile, but that does not help.
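To illustrate, this is roughly the equivalent check from inside the web container (same placeholder port as above):

# inside the container, 127.0.0.1 is the container's own loopback interface,
# not my machine, so nothing is listening on that port there
docker-compose exec web bash -c "curl -I http://127.0.0.1:8080"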
Can anybody point me in the right direction?
Here is my Dockerfile:
FROM python:3.8.5-slim-buster
# the first two prevent Python from writing out .pyc files and from buffering stdin/stdout
# the rest are for Node
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV NVM_DIR /usr/local/nvm
ENV NODE_VERSION 12.7.0
ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH
# the man1 directory is not present for slim-buster so we add that and then install all of the default system based dependencies
# NOTE...TOP LAYERS ARE CACHED FIRST!!!!
RUN mkdir -p /usr/share/man/man1 \
&& apt-get clean && apt-get update -y && apt-get install pdftk-java curl git -y \
&& curl --silent -o- https://raw.githubusercontent.com/creationix/nvm/v0.31.2/install.sh | bash \
&& apt-get install zlib1g-dev libjpeg-dev python3-pythonmagick inkscape xvfb poppler-utils libfile-mimeinfo-perl qpdf libimage-exiftool-perl ufraw-batch ffmpeg gcc procps -y \
&& apt-get clean && apt-get autoclean
# SELENIUM
# get wget...
# add trusted signing keys to apt for the Chrome repository
RUN apt-get install gnupg -y && apt-get install wget -y \
&& wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list' \
&& apt-get update -y \
&& apt-get install google-chrome-stable -y \
&& apt-get install unzip -yqq
# Set up Chromedriver Env Vars
ENV CHROMEDRIVER_VERSION 87.0.4280.20
ENV CHROMEDRIVER_DIR /chromedriver
# make directory for it...
RUN mkdir $CHROMEDRIVER_DIR
# Download and install Chromedriver
RUN wget -q --continue -P $CHROMEDRIVER_DIR "http://chromedriver.storage.googleapis.com/$CHROMEDRIVER_VERSION/chromedriver_linux64.zip" \
&& unzip $CHROMEDRIVER_DIR/chromedriver* -d $CHROMEDRIVER_DIR \
&& rm "$CHROMEDRIVER_DIR/chromedriver_linux64.zip"
# Put Chromedriver into the PATH
ENV PATH $CHROMEDRIVER_DIR:$PATH
# Set display port as an environment variable
ENV DISPLAY=:99
# SELENIUM
## NIGHTMARE
#RUN apt-get install wget -y && wget http://selenium-release.storage.googleapis.com/2.44/selenium-server-standalone-2.44.0.jar -P /bin/
#RUN apt install default-jre -y
#RUN apt-get install -y xvfb x11-xkb-utils xfonts-100dpi xfonts-75dpi xfonts-scalable xfonts-cyrillic x11-apps clang libdbus-1-dev libgtk2.0-dev libnotify-dev libgconf2-dev libasound2-dev libcap-dev libcups2-dev libxtst-dev libxss1 libnss3-dev gcc-multilib g++-multilib
# ensure node is installed, and at the end, make the working directory
RUN . $NVM_DIR/nvm.sh \
&& nvm install $NODE_VERSION \
&& nvm alias default $NODE_VERSION \
&& nvm use default \
&& mkdir /code
# set working directory to /code...it was just made for this purpose
WORKDIR /code
# possible that these will cache so separate them from COPY . /code/
COPY requirements.txt /code/
# now install, this will normally also cache
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
# place this at the end because the code will always change...this will almost never cache...
COPY . /code/
EXPOSE 8001
EXPOSE 8888
Here is my compose file:
version: '3.4'

services:
  redis:
    image: redis
    ports:
      - "6379"
    restart: unless-stopped
    networks:
      main:
        aliases:
          - redis

  postgres:
    image: postgres:12
    ports:
      - "5432:5432"
    env_file: ./.env
    restart: unless-stopped
    volumes:
      - pgdata:/var/lib/postgresql/data
    networks:
      main:
        aliases:
          - postgres

  # access pgadmin by going to localhost:16543
  # when adding a server to the server list:
  #   the hostname is postgres
  #   the username is postgres
  #   the password is postgres
  pgadmin:
    image: dpage/pgadmin4
    links:
      - postgres
    depends_on:
      - postgres
    env_file: ./.env
    restart: unless-stopped
    ports:
      - "16543:80"
    networks:
      main:
        aliases:
          - pgadmin

  celery:
    build:
      network: host
      context: .
      dockerfile: Dockerfile-dev # use Dockerfile-dev; the production Dockerfile runs npm install and npm build
    command: python manage.py celery
    env_file: ./.env
    restart: unless-stopped
    volumes:
      - .:/code
      - tmp:/tmp
    links:
      - redis
    depends_on:
      - redis
    networks:
      main:
        aliases:
          - celery

  web:
    build:
      network: host
      context: .
      dockerfile: Dockerfile-dev
    command: python manage.py runserver 0.0.0.0:8000
    volumes:
      - .:/code
      - tmp:/tmp
    ports:
      - "8000:8000"
    env_file: ./.env
    restart: unless-stopped
    links:
      - postgres
      - redis
      - celery
      - pgadmin
    depends_on:
      - postgres
      - redis
      - celery
      - pgadmin
    networks:
      main:
        aliases:
          - web

volumes:
  pgdata:
  tmp:

networks:
  main: