* Fix wrapping when too many hosts are shown (#207)

* Update npm packages, fixes CVE-2019-10757

* Revert some breaking packages

* Major overhaul

- Docker buildx support in CI
- Cypress API Testing in CI
- Restructured folder layout (insert clean face meme)
- Added Swagger documentation and validate the API against it (to be completed)
- Use a common base image for all supported archs, which includes an updated nginx with IPv6 support
- Updated certbot and the changes required for it
- Long lists of host names now wrap in the UI
- Updated packages for frontend
- Version bump 2.1.0

* Updated documentation

* Fix JWT expiry time going crazy; now set to 1 day

* Backend JS formatting rules

* Remove v1 importer, I doubt anyone is using v1 anymore

* Added backend formatting rules and enforce them
in Jenkins builds

* Fix CI, doesn't need a tty

* Thanks bcrypt. Why can't you just be normal.

* Cleanup after syntax check

Co-authored-by: Marcelo Castagna <margaale@users.noreply.github.com>
jc21 2020-02-19 15:55:06 +11:00 committed by GitHub
parent bf036cbb88
commit bb0f4bfa62
517 changed files with 26256 additions and 11724 deletions

@@ -1,12 +0,0 @@
{
"presets": [
["env", {
"targets": {
"browsers": ["Chrome >= 65"]
},
"debug": false,
"modules": false,
"useBuiltIns": "usage"
}]
]
}

11
.gitignore vendored
@@ -1,14 +1,5 @@
.DS_Store
.idea
._*
node_modules
core*
config/development.json
dist
webpack_stats.html
data/*
yarn-error.log
yarn.lock
tmp
certbot.log
.vscode

10
.jenkins/config.json Normal file
@@ -0,0 +1,10 @@
{
"database": {
"engine": "mysql",
"host": "db",
"name": "npm",
"user": "npm",
"password": "npm",
"port": 3306
}
}

1
.version Normal file
@@ -0,0 +1 @@
2.1.0

@@ -1,39 +0,0 @@
FROM jc21/nginx-proxy-manager-base:latest
MAINTAINER Jamie Curnow <jc@jc21.com>
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
ENV SUPPRESS_NO_CONFIG_WARNING=1
ENV S6_FIX_ATTRS_HIDDEN=1
RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
# Nginx, Node and required packages should already be installed from the base image
# root filesystem
COPY rootfs /
# s6 overlay
RUN curl -L -o /tmp/s6-overlay-amd64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.21.4.0/s6-overlay-amd64.tar.gz" \
&& tar xzf /tmp/s6-overlay-amd64.tar.gz -C /
# App
ENV NODE_ENV=production
ADD dist /app/dist
ADD node_modules /app/node_modules
ADD src/backend /app/src/backend
ADD package.json /app/package.json
ADD knexfile.js /app/knexfile.js
# Volumes
VOLUME [ "/data", "/etc/letsencrypt" ]
CMD [ "/init" ]
# Ports
EXPOSE 80
EXPOSE 81
EXPOSE 443
EXPOSE 9876
HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://localhost:9876/health || exit 1

@@ -1,38 +0,0 @@
FROM jc21/nginx-proxy-manager-base:arm64
MAINTAINER Jamie Curnow <jc@jc21.com>
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
ENV SUPPRESS_NO_CONFIG_WARNING=1
ENV S6_FIX_ATTRS_HIDDEN=1
RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
# Nginx, Node and required packages should already be installed from the base image
# root filesystem
COPY rootfs /
# s6 overlay
RUN curl -L -o /tmp/s6-overlay-aarch64.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.21.8.0/s6-overlay-aarch64.tar.gz" \
&& tar xzf /tmp/s6-overlay-aarch64.tar.gz -C /
# App
ENV NODE_ENV=production
ADD dist /app/dist
ADD node_modules /app/node_modules
ADD src/backend /app/src/backend
ADD package.json /app/package.json
ADD knexfile.js /app/knexfile.js
# Volumes
VOLUME [ "/data", "/etc/letsencrypt" ]
CMD [ "/init" ]
# Ports
EXPOSE 80
EXPOSE 81
EXPOSE 443
EXPOSE 9876
HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://localhost:9876/health || exit 1

@@ -1,38 +0,0 @@
FROM jc21/nginx-proxy-manager-base:armv6
MAINTAINER Jamie Curnow <jc@jc21.com>
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
ENV SUPPRESS_NO_CONFIG_WARNING=1
ENV S6_FIX_ATTRS_HIDDEN=1
RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
# Nginx, Node and required packages should already be installed from the base image
# root filesystem
COPY rootfs /
# s6 overlay
RUN curl -L -o /tmp/s6-overlay-arm.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.21.8.0/s6-overlay-arm.tar.gz" \
&& tar xzf /tmp/s6-overlay-arm.tar.gz -C /
# App
ENV NODE_ENV=production
ADD dist /app/dist
ADD node_modules /app/node_modules
ADD src/backend /app/src/backend
ADD package.json /app/package.json
ADD knexfile.js /app/knexfile.js
# Volumes
VOLUME [ "/data", "/etc/letsencrypt" ]
CMD [ "/init" ]
# Ports
EXPOSE 80
EXPOSE 81
EXPOSE 443
EXPOSE 9876
HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://localhost:9876/health || exit 1

@@ -1,38 +0,0 @@
FROM jc21/nginx-proxy-manager-base:armhf
MAINTAINER Jamie Curnow <jc@jc21.com>
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
ENV SUPPRESS_NO_CONFIG_WARNING=1
ENV S6_FIX_ATTRS_HIDDEN=1
RUN echo "fs.file-max = 65535" > /etc/sysctl.conf
# Nginx, Node and required packages should already be installed from the base image
# root filesystem
COPY rootfs /
# s6 overlay
RUN curl -L -o /tmp/s6-overlay-armhf.tar.gz "https://github.com/just-containers/s6-overlay/releases/download/v1.21.4.0/s6-overlay-armhf.tar.gz" \
&& tar xzf /tmp/s6-overlay-armhf.tar.gz -C /
# App
ENV NODE_ENV=production
ADD dist /app/dist
ADD node_modules /app/node_modules
ADD src/backend /app/src/backend
ADD package.json /app/package.json
ADD knexfile.js /app/knexfile.js
# Volumes
VOLUME [ "/data", "/etc/letsencrypt" ]
CMD [ "/init" ]
# Ports
EXPOSE 80
EXPOSE 81
EXPOSE 443
EXPOSE 9876
HEALTHCHECK --interval=15s --timeout=3s CMD curl -f http://localhost:9876/health || exit 1

425
Jenkinsfile vendored
@@ -1,348 +1,139 @@
pipeline {
agent {
label 'docker-multiarch'
}
options {
buildDiscarder(logRotator(numToKeepStr: '10'))
buildDiscarder(logRotator(numToKeepStr: '5'))
disableConcurrentBuilds()
}
agent any
environment {
IMAGE = "nginx-proxy-manager"
BASE_IMAGE = "jc21/${IMAGE}-base"
TEMP_IMAGE = "${IMAGE}-build_${BUILD_NUMBER}"
TAG_VERSION = getPackageVersion()
BUILD_VERSION = getVersion()
MAJOR_VERSION = "2"
COMPOSE_PROJECT_NAME = "npm_${GIT_BRANCH}_${BUILD_NUMBER}"
COMPOSE_FILE = 'docker/docker-compose.ci.yml'
COMPOSE_INTERACTIVE_NO_CLI = 1
BUILDX_NAME = "${COMPOSE_PROJECT_NAME}"
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase()}"
// Architectures:
AMD64_TAG = "amd64"
ARMV6_TAG = "armv6l"
ARMV7_TAG = "armv7l"
ARM64_TAG = "arm64"
// Defaults to the branch name, which applies to all branches AND PRs
BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}"
}
stages {
stage('Build PR') {
when {
changeRequest()
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} node-prune'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${AMD64_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${AMD64_TAG} docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}-${AMD64_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:github-${BRANCH_LOWER}-${AMD64_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${AMD64_TAG}'
script {
def comment = pullRequest.comment("Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}-${AMD64_TAG}`")
}
}
}
}
stage('Build Develop') {
when {
branch 'develop'
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} node-prune'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${AMD64_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${AMD64_TAG} docker.io/jc21/${IMAGE}:develop-${AMD64_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:develop-${AMD64_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${AMD64_TAG}'
}
}
}
stage('Build Master') {
when {
branch 'master'
}
stage('Environment') {
parallel {
// ========================
// amd64
// ========================
stage('amd64') {
agent {
label 'amd64'
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} node-prune'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${AMD64_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${AMD64_TAG} docker.io/jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${AMD64_TAG} docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${AMD64_TAG} docker.io/jc21/${IMAGE}:latest-${AMD64_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:latest-${AMD64_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${AMD64_TAG}'
}
}
}
// ========================
// arm64
// ========================
stage('arm64') {
agent {
label 'arm64'
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${ARM64_TAG} -f Dockerfile.${ARM64_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${ARM64_TAG} docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARM64_TAG} docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARM64_TAG} docker.io/jc21/${IMAGE}:latest-${ARM64_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:latest-${ARM64_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${ARM64_TAG}'
}
}
}
// ========================
// armv7l
// ========================
stage('armv7l') {
agent {
label 'armv7l'
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${ARMV7_TAG} -f Dockerfile.${ARMV7_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${ARMV7_TAG} docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARMV7_TAG} docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARMV7_TAG} docker.io/jc21/${IMAGE}:latest-${ARMV7_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:latest-${ARMV7_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${ARMV7_TAG}'
}
}
}
// ========================
// armv6l - Disabled for the time being
// ========================
/*
stage('armv6l') {
agent {
label 'armv6l'
}
steps {
ansiColor('xterm') {
// Codebase
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} npm run-script build'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} rm -rf node_modules'
sh 'docker run --rm -v $(pwd):/app -w /app ${BASE_IMAGE} yarn install --prod'
// Docker Build
sh 'docker build --pull --no-cache --squash --compress -t ${TEMP_IMAGE}-${ARMV6_TAG} -f Dockerfile.${ARMV6_TAG} .'
// Dockerhub
sh 'docker tag ${TEMP_IMAGE}-${ARMV6_TAG} docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARMV6_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARMV6_TAG} docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV6_TAG}'
sh 'docker tag ${TEMP_IMAGE}-${ARMV6_TAG} docker.io/jc21/${IMAGE}:latest-${ARMV6_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
sh 'docker push docker.io/jc21/${IMAGE}:${TAG_VERSION}-${ARMV6_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV6_TAG}'
sh 'docker push docker.io/jc21/${IMAGE}:latest-${ARMV6_TAG}'
}
sh 'docker rmi ${TEMP_IMAGE}-${ARMV6_TAG}'
}
}
}
*/
}
}
// ========================
// latest manifest
// ========================
stage('Latest Manifest') {
stage('Master') {
when {
branch 'master'
}
steps {
script {
env.BUILDX_PUSH_TAGS = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION}"
}
}
}
}
}
stage('Frontend') {
steps {
ansiColor('xterm') {
// =======================
// latest
// =======================
sh 'docker pull jc21/${IMAGE}:latest-${AMD64_TAG}'
sh 'docker pull jc21/${IMAGE}:latest-${ARM64_TAG}'
sh 'docker pull jc21/${IMAGE}:latest-${ARMV7_TAG}'
//sh 'docker pull jc21/${IMAGE}:latest-${ARMV6_TAG}'
sh 'docker manifest push --purge jc21/${IMAGE}:latest || echo ""'
sh 'docker manifest create jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${AMD64_TAG} jc21/${IMAGE}:latest-${ARM64_TAG} jc21/${IMAGE}:latest-${ARMV7_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${AMD64_TAG} --arch ${AMD64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${ARM64_TAG} --os linux --arch ${ARM64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${ARMV7_TAG} --os linux --arch arm --variant ${ARMV7_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${ARMV6_TAG} --os linux --arch arm --variant ${ARMV6_TAG}'
sh 'docker manifest push --purge jc21/${IMAGE}:latest'
// =======================
// major version
// =======================
sh 'docker pull jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG}'
sh 'docker pull jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG}'
sh 'docker pull jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG}'
//sh 'docker pull jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV6_TAG}'
sh 'docker manifest push --purge jc21/${IMAGE}:${MAJOR_VERSION} || echo ""'
sh 'docker manifest create jc21/${IMAGE}:${MAJOR_VERSION} jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG} jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG} jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${MAJOR_VERSION} jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG} --arch ${AMD64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${MAJOR_VERSION} jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG} --os linux --arch ${ARM64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${MAJOR_VERSION} jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG} --os linux --arch arm --variant ${ARMV7_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:${MAJOR_VERSION} jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV6_TAG} --os linux --arch arm --variant ${ARMV6_TAG}'
// =======================
// version
// =======================
sh 'docker pull jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG}'
sh 'docker pull jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG}'
sh 'docker pull jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG}'
//sh 'docker pull jc21/${IMAGE}:${TAG_VERSION}-${ARMV6_TAG}'
sh 'docker manifest push --purge jc21/${IMAGE}:${TAG_VERSION} || echo ""'
sh 'docker manifest create jc21/${IMAGE}:${TAG_VERSION} jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG} jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG} jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${TAG_VERSION} jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG} --arch ${AMD64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${TAG_VERSION} jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG} --os linux --arch ${ARM64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:${TAG_VERSION} jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG} --os linux --arch arm --variant ${ARMV7_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:${TAG_VERSION} jc21/${IMAGE}:${TAG_VERSION}-${ARMV6_TAG} --os linux --arch arm --variant ${ARMV6_TAG}'
sh './scripts/frontend-build'
}
}
}
// ========================
// develop
// ========================
stage('Develop Manifest') {
stage('Backend') {
steps {
ansiColor('xterm') {
echo 'Checking Syntax ...'
// See: https://github.com/yarnpkg/yarn/issues/3254
sh '''docker run --rm \\
-v "$(pwd)/backend:/app" \\
-w /app \\
node:latest \\
sh -c "yarn install && yarn eslint . && rm -rf node_modules"
'''
echo 'Docker Build ...'
sh '''docker build --pull --no-cache --squash --compress \\
-t "${IMAGE}:ci-${BUILD_NUMBER}" \\
-f docker/Dockerfile \\
--build-arg TARGETPLATFORM=linux/amd64 \\
--build-arg BUILDPLATFORM=linux/amd64 \\
--build-arg BUILD_VERSION="${BUILD_VERSION}" \\
--build-arg BUILD_COMMIT="${BUILD_COMMIT}" \\
--build-arg BUILD_DATE="$(date '+%Y-%m-%d %T %Z')" \\
.
'''
}
}
}
stage('Test') {
steps {
ansiColor('xterm') {
// Bring up a stack
sh 'docker-compose up -d fullstack'
sh './scripts/wait-healthy $(docker-compose ps -q fullstack) 120'
// Run tests
sh 'rm -rf test/results'
sh 'docker-compose up cypress'
// Get results
sh 'docker cp -L "$(docker-compose ps -q cypress):/results" test/'
}
}
post {
always {
junit 'test/results/junit/*'
// Cypress videos and screenshot artifacts
dir(path: 'test/results') {
archiveArtifacts allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml'
}
// Dumps to analyze later
sh 'mkdir -p debug'
sh 'docker-compose logs fullstack | gzip > debug/docker_fullstack.log.gz'
}
}
}
stage('MultiArch Build') {
when {
branch 'develop'
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
ansiColor('xterm') {
sh 'docker pull jc21/${IMAGE}:develop-${AMD64_TAG}'
//sh 'docker pull jc21/${IMAGE}:develop-${ARM64_TAG}'
//sh 'docker pull jc21/${IMAGE}:develop-${ARMV7_TAG}'
//sh 'docker pull jc21/${IMAGE}:${TAG_VERSION}-${ARMV6_TAG}'
sh 'docker manifest push --purge jc21/${IMAGE}:develop || :'
sh 'docker manifest create jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${AMD64_TAG}'
sh 'docker manifest annotate jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${AMD64_TAG} --arch ${AMD64_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${ARM64_TAG} --os linux --arch ${ARM64_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${ARMV7_TAG} --os linux --arch arm --variant ${ARMV7_TAG}'
//sh 'docker manifest annotate jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${ARMV6_TAG} --os linux --arch arm --variant ${ARMV6_TAG}'
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh "docker login -u '${duser}' -p '${dpass}'"
// Buildx with push
sh "./scripts/buildx --push ${BUILDX_PUSH_TAGS}"
}
}
}
// ========================
// cleanup
// ========================
stage('Latest Cleanup') {
when {
branch 'master'
}
steps {
ansiColor('xterm') {
sh 'docker rmi jc21/${IMAGE}:latest jc21/${IMAGE}:latest-${AMD64_TAG} jc21/${IMAGE}:latest-${ARM64_TAG} jc21/${IMAGE}:latest-${ARMV7_TAG} || echo ""'
sh 'docker rmi jc21/${IMAGE}:${MAJOR_VERSION}-${AMD64_TAG} jc21/${IMAGE}:${MAJOR_VERSION}-${ARM64_TAG} jc21/${IMAGE}:${MAJOR_VERSION}-${ARMV7_TAG} || echo ""'
sh 'docker rmi jc21/${IMAGE}:${TAG_VERSION}-${AMD64_TAG} jc21/${IMAGE}:${TAG_VERSION}-${ARM64_TAG} jc21/${IMAGE}:${TAG_VERSION}-${ARMV7_TAG} || echo ""'
}
}
}
stage('Develop Cleanup') {
when {
branch 'develop'
}
steps {
ansiColor('xterm') {
sh 'docker rmi jc21/${IMAGE}:develop jc21/${IMAGE}:develop-${AMD64_TAG} || echo ""'
}
}
}
stage('PR Cleanup') {
}
stage('PR Comment') {
when {
allOf {
changeRequest()
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
ansiColor('xterm') {
sh 'docker rmi jc21/${IMAGE}:github-${BRANCH_LOWER}-${AMD64_TAG} || echo ""'
script {
def comment = pullRequest.comment("Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/jc21/${IMAGE}) as `jc21/${IMAGE}:github-${BRANCH_LOWER}`")
}
}
}
}
}
post {
always {
sh 'docker-compose down --rmi all --remove-orphans --volumes -t 30'
sh 'echo Reverting ownership'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} chown -R $(id -u):$(id -g) /data'
}
success {
juxtapose event: 'success'
sh 'figlet "SUCCESS"'
@@ -351,14 +142,20 @@ pipeline {
juxtapose event: 'failure'
sh 'figlet "FAILURE"'
}
always {
sh 'echo Reverting ownership'
sh 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} chown -R $(id -u):$(id -g) /data'
unstable {
archiveArtifacts(artifacts: 'debug/**.*', allowEmptyArchive: true)
juxtapose event: 'unstable'
sh 'figlet "UNSTABLE"'
}
}
}
def getPackageVersion() {
ver = sh(script: 'docker run --rm -v $(pwd):/data ${DOCKER_CI_TOOLS} bash -c "cat /data/package.json|jq -r \'.version\'"', returnStdout: true)
def getVersion() {
ver = sh(script: 'cat .version', returnStdout: true)
return ver.trim()
}
def getCommit() {
ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
return ver.trim()
}

@@ -6,6 +6,8 @@
![Stars](https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge)
![Pulls](https://img.shields.io/docker/pulls/jc21/nginx-proxy-manager.svg?style=for-the-badge)
[![Build Status](https://ci.nginxproxymanager.jc21.com/buildStatus/icon?job=nginx-proxy-manager%2Fmaster&style=flat-square)](https://ci.nginxproxymanager.jc21.com/job/nginx-proxy-manager/job/master/)
This project comes as a pre-built docker image that enables you to easily forward to your websites
running at home or otherwise, including free SSL, without having to know too much about Nginx or Letsencrypt.
@@ -84,8 +86,8 @@ I won't go in to too much detail here but here are the basics for someone new to
1. Your home router will have a Port Forwarding section somewhere. Log in and find it
2. Add port forwarding for port 80 and 443 to the server hosting this project
3. Configure your domain name details to point to your home, either with a static ip or a service like DuckDNS
4. Use the Nginx Proxy Manager here as your gateway to forward to your other web based services
3. Configure your domain name details to point to your home, either with a static ip or a service like DuckDNS or [Amazon Route53](https://github.com/jc21/route53-ddns)
4. Use the Nginx Proxy Manager as your gateway to forward to your other web based services
## Nginx Proxy Manager in the wild

73
backend/.eslintrc.json Normal file
@@ -0,0 +1,73 @@
{
"env": {
"node": true,
"es6": true
},
"extends": [
"eslint:recommended"
],
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018,
"sourceType": "module"
},
"plugins": [
"align-assignments"
],
"rules": {
"arrow-parens": [
"error",
"always"
],
"indent": [
"error",
"tab"
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
],
"key-spacing": [
"error",
{
"align": "value"
}
],
"comma-spacing": [
"error",
{
"before": false,
"after": true
}
],
"func-call-spacing": [
"error",
"never"
],
"keyword-spacing": [
"error",
{
"before": true
}
],
"no-irregular-whitespace": "error",
"no-unused-expressions": 0,
"align-assignments/align-assignments": [
2,
{
"requiresOnly": false
}
]
}
}
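
For illustration, a minimal sketch of code that satisfies this ruleset (a hypothetical module, not part of this commit): tabs for indentation, single quotes, mandatory semicolons, arrow parens always, and assignments aligned per the align-assignments plugin.

// Hypothetical module illustrating backend/.eslintrc.json; not part of this commit
const fs   = require('fs');
const path = require('path');

const readConfig = (dir) => {
	const file = path.join(dir, 'config.json');
	return JSON.parse(fs.readFileSync(file, 'utf8'));
};

module.exports = {readConfig};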

6
backend/.gitignore vendored Normal file
@@ -0,0 +1,6 @@
config/development.json
data/*
yarn-error.log
tmp
certbot.log
node_modules

11
backend/.prettierrc Normal file
@@ -0,0 +1,11 @@
{
"printWidth": 320,
"tabWidth": 4,
"useTabs": true,
"semi": true,
"singleQuote": true,
"bracketSpacing": true,
"jsxBracketSameLine": true,
"trailingComma": "all",
"proseWrap": "always"
}

90
backend/app.js Normal file
@@ -0,0 +1,90 @@
const express = require('express');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const compression = require('compression');
const log = require('./logger').express;
/**
* App
*/
const app = express();
app.use(fileUpload());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: true}));
// Gzip
app.use(compression());
/**
* General Logging, BEFORE routes
*/
app.disable('x-powered-by');
app.enable('trust proxy', ['loopback', 'linklocal', 'uniquelocal']);
app.enable('strict routing');
// pretty print JSON when not live
if (process.env.NODE_ENV !== 'production') {
app.set('json spaces', 2);
}
// CORS for everything
app.use(require('./lib/express/cors'));
// General security/cache related headers + server header
app.use(function (req, res, next) {
let x_frame_options = 'DENY';
if (typeof process.env.X_FRAME_OPTIONS !== 'undefined' && process.env.X_FRAME_OPTIONS) {
x_frame_options = process.env.X_FRAME_OPTIONS;
}
res.set({
'Strict-Transport-Security': 'includeSubDomains; max-age=631138519; preload',
'X-XSS-Protection': '1; mode=block',
'X-Content-Type-Options': 'nosniff',
'X-Frame-Options': x_frame_options,
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate',
Pragma: 'no-cache',
Expires: 0
});
next();
});
app.use(require('./lib/express/jwt')());
app.use('/', require('./routes/api/main'));
// production error handler
// no stacktraces leaked to user
// eslint-disable-next-line
app.use(function (err, req, res, next) {
let payload = {
error: {
code: err.status,
message: err.public ? err.message : 'Internal Error'
}
};
if (process.env.NODE_ENV === 'development') {
payload.debug = {
stack: typeof err.stack !== 'undefined' && err.stack ? err.stack.split('\n') : null,
previous: err.previous
};
}
// Not every error is worth logging - but this is good for now until it gets annoying.
if (typeof err.stack !== 'undefined' && err.stack) {
if (process.env.NODE_ENV === 'development') {
log.debug(err.stack);
} else if (typeof err.public == 'undefined' || !err.public) {
log.warn(err.message);
}
}
res
.status(err.status || 500)
.send(payload);
});
module.exports = app;

25
backend/db.js Normal file
@@ -0,0 +1,25 @@
const config = require('config');
if (!config.has('database')) {
throw new Error('Database config does not exist! Please read the instructions: https://github.com/jc21/nginx-proxy-manager/blob/master/doc/INSTALL.md');
}
let data = {
client: config.database.engine,
connection: {
host: config.database.host,
user: config.database.user,
password: config.database.password,
database: config.database.name,
port: config.database.port
},
migrations: {
tableName: 'migrations'
}
};
if (typeof config.database.version !== 'undefined') {
data.version = config.database.version;
}
module.exports = require('knex')(data);
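
Because the export is an already-configured knex instance, it can be queried directly as well as through the models added elsewhere in this commit. A minimal usage sketch (table and column names are assumptions for illustration):

// Hypothetical direct usage of backend/db.js
const db = require('./db');

db('user')
	.where('is_deleted', 0)
	.select('id', 'email')
	.then((rows) => {
		console.log(rows.length + ' active users');
	});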

1254
backend/doc/api.swagger.json Normal file

File diff suppressed because it is too large

47
backend/index.js Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env node
const logger = require('./logger').global;
function appStart () {
const migrate = require('./migrate');
const setup = require('./setup');
const app = require('./app');
const apiValidator = require('./lib/validator/api');
const internalCertificate = require('./internal/certificate');
const internalIpRanges = require('./internal/ip_ranges');
return migrate.latest()
.then(setup)
.then(() => {
return apiValidator.loadSchemas;
})
.then(internalIpRanges.fetch)
.then(() => {
internalCertificate.initTimer();
internalIpRanges.initTimer();
const server = app.listen(3000, () => {
logger.info('Backend PID ' + process.pid + ' listening on port 3000 ...');
process.on('SIGTERM', () => {
logger.info('PID ' + process.pid + ' received SIGTERM');
server.close(() => {
logger.info('Stopping.');
process.exit(0);
});
});
});
})
.catch((err) => {
logger.error(err.message);
setTimeout(appStart, 1000);
});
}
try {
appStart();
} catch (err) {
logger.error(err.message, err);
process.exit(1);
}

@@ -0,0 +1,482 @@
const _ = require('lodash');
const fs = require('fs');
const batchflow = require('batchflow');
const logger = require('../logger').access;
const error = require('../lib/error');
const accessListModel = require('../models/access_list');
const accessListAuthModel = require('../models/access_list_auth');
const proxyHostModel = require('../models/proxy_host');
const internalAuditLog = require('./audit-log');
const internalNginx = require('./nginx');
const utils = require('../lib/utils');
function omissions () {
return ['is_deleted'];
}
const internalAccessList = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
return access.can('access_lists:create', data)
.then((/*access_data*/) => {
return accessListModel
.query()
.omit(omissions())
.insertAndFetch({
name: data.name,
owner_user_id: access.token.getUserId(1)
});
})
.then((row) => {
data.id = row.id;
// Now add the items
let promises = [];
data.items.map((item) => {
promises.push(accessListAuthModel
.query()
.insert({
access_list_id: row.id,
username: item.username,
password: item.password
})
);
});
return Promise.all(promises);
})
.then(() => {
// re-fetch with expansions
return internalAccessList.get(access, {
id: data.id,
expand: ['owner', 'items']
}, true /* <- skip masking */);
})
.then((row) => {
// Audit log
data.meta = _.assign({}, data.meta || {}, row.meta);
return internalAccessList.build(row)
.then(() => {
if (row.proxy_host_count) {
return internalNginx.reload();
}
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'access-list',
object_id: row.id,
meta: internalAccessList.maskItems(data)
});
})
.then(() => {
return internalAccessList.maskItems(row);
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {String} [data.name]
* @param {String} [data.items]
* @return {Promise}
*/
update: (access, data) => {
return access.can('access_lists:update', data.id)
.then((/*access_data*/) => {
return internalAccessList.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Access List could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
})
.then(() => {
// patch name if specified
if (typeof data.name !== 'undefined' && data.name) {
return accessListModel
.query()
.where({id: data.id})
.patch({
name: data.name
});
}
})
.then(() => {
// Check for items and add/update/remove them
if (typeof data.items !== 'undefined' && data.items) {
let promises = [];
let items_to_keep = [];
data.items.map(function (item) {
if (item.password) {
promises.push(accessListAuthModel
.query()
.insert({
access_list_id: data.id,
username: item.username,
password: item.password
})
);
} else {
// This was supplied with an empty password, which means keep it but don't change the password
items_to_keep.push(item.username);
}
});
let query = accessListAuthModel
.query()
.delete()
.where('access_list_id', data.id);
if (items_to_keep.length) {
query.andWhere('username', 'NOT IN', items_to_keep);
}
return query
.then(() => {
// Add new items
if (promises.length) {
return Promise.all(promises);
}
});
}
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'access-list',
object_id: data.id,
meta: internalAccessList.maskItems(data)
});
})
.then(() => {
// re-fetch with expansions
return internalAccessList.get(access, {
id: data.id,
expand: ['owner', 'items']
}, true /* <- skip masking */);
})
.then((row) => {
return internalAccessList.build(row)
.then(() => {
if (row.proxy_host_count) {
return internalNginx.reload();
}
})
.then(() => {
return internalAccessList.maskItems(row);
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @param {Boolean} [skip_masking]
* @return {Promise}
*/
get: (access, data, skip_masking) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('access_lists:get', data.id)
.then((access_data) => {
let query = accessListModel
.query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.andWhere('access_list.id', data.id)
.allowEager('[owner,items,proxy_hosts]')
.omit(['access_list.is_deleted'])
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('access_list.owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
if (!skip_masking && typeof row.items !== 'undefined' && row.items) {
row = internalAccessList.maskItems(row);
}
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('access_lists:delete', data.id)
.then(() => {
return internalAccessList.get(access, {id: data.id, expand: ['proxy_hosts', 'items']});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
// 1. update row to be deleted
// 2. update any proxy hosts that were using it (ignoring permissions)
// 3. reconfigure those hosts
// 4. audit log
// 1. update row to be deleted
return accessListModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// 2. update any proxy hosts that were using it (ignoring permissions)
if (row.proxy_hosts) {
return proxyHostModel
.query()
.where('access_list_id', '=', row.id)
.patch({access_list_id: 0})
.then(() => {
// 3. reconfigure those hosts, then reload nginx
// set the access_list_id to zero for these items
row.proxy_hosts.map(function (val, idx) {
row.proxy_hosts[idx].access_list_id = 0;
});
return internalNginx.bulkGenerateConfigs('proxy_host', row.proxy_hosts);
})
.then(() => {
return internalNginx.reload();
});
}
})
.then(() => {
// delete the htpasswd file
let htpasswd_file = internalAccessList.getFilename(row);
try {
fs.unlinkSync(htpasswd_file);
} catch (err) {
// do nothing
}
})
.then(() => {
// 4. audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'access-list',
object_id: row.id,
meta: _.omit(internalAccessList.maskItems(row), ['is_deleted', 'proxy_hosts'])
});
});
})
.then(() => {
return true;
});
},
/**
* All Lists
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('access_lists:list')
.then((access_data) => {
let query = accessListModel
.query()
.select('access_list.*', accessListModel.raw('COUNT(proxy_host.id) as proxy_host_count'))
.joinRaw('LEFT JOIN `proxy_host` ON `proxy_host`.`access_list_id` = `access_list`.`id` AND `proxy_host`.`is_deleted` = 0')
.where('access_list.is_deleted', 0)
.groupBy('access_list.id')
.omit(['access_list.is_deleted'])
.allowEager('[owner,items]')
.orderBy('access_list.name', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('name', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
})
.then((rows) => {
if (rows) {
rows.map(function (row, idx) {
if (typeof row.items !== 'undefined' && row.items) {
rows[idx] = internalAccessList.maskItems(row);
}
});
}
return rows;
});
},
/**
* Report use
*
* @param {Integer} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = accessListModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
},
/**
* @param {Object} list
* @returns {Object}
*/
maskItems: (list) => {
if (list && typeof list.items !== 'undefined') {
list.items.map(function (val, idx) {
let repeat_for = 8;
let first_char = '*';
if (typeof val.password !== 'undefined' && val.password) {
repeat_for = val.password.length - 1;
first_char = val.password.charAt(0);
}
list.items[idx].hint = first_char + ('*').repeat(repeat_for);
list.items[idx].password = '';
});
}
return list;
},
/**
* @param {Object} list
* @param {Integer} list.id
* @returns {String}
*/
getFilename: (list) => {
return '/data/access/' + list.id;
},
/**
* @param {Object} list
* @param {Integer} list.id
* @param {String} list.name
* @param {Array} list.items
* @returns {Promise}
*/
build: (list) => {
logger.info('Building Access file #' + list.id + ' for: ' + list.name);
return new Promise((resolve, reject) => {
let htpasswd_file = internalAccessList.getFilename(list);
// 1. remove any existing access file
try {
fs.unlinkSync(htpasswd_file);
} catch (err) {
// do nothing
}
// 2. create empty access file
try {
fs.writeFileSync(htpasswd_file, '', {encoding: 'utf8'});
resolve(htpasswd_file);
} catch (err) {
reject(err);
}
})
.then((htpasswd_file) => {
// 3. generate password for each user
if (list.items.length) {
return new Promise((resolve, reject) => {
batchflow(list.items).sequential()
.each((i, item, next) => {
if (typeof item.password !== 'undefined' && item.password.length) {
logger.info('Adding: ' + item.username);
utils.exec('/usr/bin/htpasswd -b "' + htpasswd_file + '" "' + item.username + '" "' + item.password + '"')
.then((/*result*/) => {
next();
})
.catch((err) => {
logger.error(err);
next(err);
});
}
})
.error((err) => {
logger.error(err);
reject(err);
})
.end((results) => {
logger.success('Built Access file #' + list.id + ' for: ' + list.name);
resolve(results);
});
});
}
});
}
};
module.exports = internalAccessList;
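
To make the password masking above concrete, a quick sketch of what maskItems returns for a fetched list (input values hypothetical):

// maskItems blanks each password and replaces it with a same-length hint:
const masked = internalAccessList.maskItems({
	items: [{username: 'jc', password: 'secret123'}]
});
// masked.items[0].password === ''
// masked.items[0].hint     === 's********' (first character, then one '*' per remaining character)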

@@ -0,0 +1,78 @@
const error = require('../lib/error');
const auditLogModel = require('../models/audit-log');
const internalAuditLog = {
/**
* All logs
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('auditlog:list')
.then(() => {
let query = auditLogModel
.query()
.orderBy('created_on', 'DESC')
.orderBy('id', 'DESC')
.limit(100)
.allowEager('[user]');
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('meta', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
});
},
/**
* This method should not be used publicly, as it doesn't check certain things. It is assumed
* that permission to add to the audit log has already been established; however, the access
* token is used to determine the default user id.
*
* @param {Access} access
* @param {Object} data
* @param {String} data.action
* @param {Number} [data.user_id]
* @param {Number} [data.object_id]
* @param {Number} [data.object_type]
* @param {Object} [data.meta]
* @returns {Promise}
*/
add: (access, data) => {
return new Promise((resolve, reject) => {
// Default the user id
if (typeof data.user_id === 'undefined' || !data.user_id) {
data.user_id = access.token.getUserId(1);
}
if (typeof data.action === 'undefined' || !data.action) {
reject(new error.InternalValidationError('Audit log entry must contain an Action'));
} else {
// Make sure at least one of the IDs is set, along with the action
resolve(auditLogModel
.query()
.insert({
user_id: data.user_id,
action: data.action,
object_type: data.object_type || '',
object_id: data.object_id || 0,
meta: data.meta || {}
}));
}
});
}
};
module.exports = internalAuditLog;
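
A typical call mirrors the usage in the access-list module above (access and row are stand-ins for values from the surrounding request context):

// Sketch of recording an audit log entry:
internalAuditLog.add(access, {
	action:      'created',
	object_type: 'access-list',
	object_id:   row.id,
	meta:        {name: row.name}
});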

@@ -0,0 +1,926 @@
const fs = require('fs');
const _ = require('lodash');
const logger = require('../logger').ssl;
const error = require('../lib/error');
const certificateModel = require('../models/certificate');
const internalAuditLog = require('./audit-log');
const tempWrite = require('temp-write');
const utils = require('../lib/utils');
const moment = require('moment');
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
const le_staging = process.env.NODE_ENV !== 'production';
const internalNginx = require('./nginx');
const internalHost = require('./host');
const certbot_command = '/usr/bin/certbot';
const le_config = '/etc/letsencrypt.ini';
function omissions() {
return ['is_deleted'];
}
const internalCertificate = {
allowed_ssl_files: ['certificate', 'certificate_key', 'intermediate_certificate'],
interval_timeout: 1000 * 60 * 60, // 1 hour
interval: null,
interval_processing: false,
initTimer: () => {
logger.info('Let\'s Encrypt Renewal Timer initialized');
internalCertificate.interval = setInterval(internalCertificate.processExpiringHosts, internalCertificate.interval_timeout);
// And do this now as well
internalCertificate.processExpiringHosts();
},
/**
* Triggered by a timer, this will check for expiring hosts and renew their ssl certs if required
*/
processExpiringHosts: () => {
if (!internalCertificate.interval_processing) {
internalCertificate.interval_processing = true;
logger.info('Renewing SSL certs close to expiry...');
let cmd = certbot_command + ' renew --non-interactive --quiet ' +
'--config "' + le_config + '" ' +
'--preferred-challenges "dns,http" ' +
'--disable-hook-validation ' +
(le_staging ? '--staging' : '');
return utils.exec(cmd)
.then((result) => {
if (result) {
logger.info('Renew Result: ' + result);
}
return internalNginx.reload()
.then(() => {
logger.info('Renew Complete');
return result;
});
})
.then(() => {
// Now go and fetch all the letsencrypt certs from the db and query the files and update expiry times
return certificateModel
.query()
.where('is_deleted', 0)
.andWhere('provider', 'letsencrypt')
.then((certificates) => {
if (certificates && certificates.length) {
let promises = [];
certificates.map(function (certificate) {
promises.push(
internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
.then((cert_info) => {
return certificateModel
.query()
.where('id', certificate.id)
.andWhere('provider', 'letsencrypt')
.patch({
expires_on: certificateModel.raw('FROM_UNIXTIME(' + cert_info.dates.to + ')')
});
})
.catch((err) => {
// Don't want to stop the train here, just log the error
logger.error(err.message);
})
);
});
return Promise.all(promises);
}
});
})
.then(() => {
internalCertificate.interval_processing = false;
})
.catch((err) => {
logger.error(err);
internalCertificate.interval_processing = false;
});
}
},
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
return access.can('certificates:create', data)
.then(() => {
data.owner_user_id = access.token.getUserId(1);
if (data.provider === 'letsencrypt') {
data.nice_name = data.domain_names.sort().join(', ');
}
return certificateModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((certificate) => {
if (certificate.provider === 'letsencrypt') {
// Request a new Cert from LE. Let the fun begin.
// 1. Find out any hosts that are using any of the hostnames in this cert
// 2. Disable them in nginx temporarily
// 3. Generate the LE config
// 4. Request cert
// 5. Remove LE config
// 6. Re-instate previously disabled hosts
// 1. Find out any hosts that are using any of the hostnames in this cert
return internalHost.getHostsWithDomains(certificate.domain_names)
.then((in_use_result) => {
// 2. Disable them in nginx temporarily
return internalCertificate.disableInUseHosts(in_use_result)
.then(() => {
return in_use_result;
});
})
.then((in_use_result) => {
// 3. Generate the LE config
return internalNginx.generateLetsEncryptRequestConfig(certificate)
.then(internalNginx.reload)
.then(() => {
// 4. Request cert
return internalCertificate.requestLetsEncryptSsl(certificate);
})
.then(() => {
// 5. Remove LE config
return internalNginx.deleteLetsEncryptRequestConfig(certificate);
})
.then(internalNginx.reload)
.then(() => {
// 6. Re-instate previously disabled hosts
return internalCertificate.enableInUseHosts(in_use_result);
})
.then(() => {
return certificate;
})
.catch((err) => {
// In the event of failure, revert things and throw err back
return internalNginx.deleteLetsEncryptRequestConfig(certificate)
.then(() => {
return internalCertificate.enableInUseHosts(in_use_result);
})
.then(internalNginx.reload)
.then(() => {
throw err;
});
});
})
.then(() => {
// At this point, the letsencrypt cert should exist on disk.
// Lets get the expiry date from the file and update the row silently
return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem')
.then((cert_info) => {
return certificateModel
.query()
.patchAndFetchById(certificate.id, {
expires_on: certificateModel.raw('FROM_UNIXTIME(' + cert_info.dates.to + ')')
})
.then((saved_row) => {
// Add cert data for audit log
saved_row.meta = _.assign({}, saved_row.meta, {
letsencrypt_certificate: cert_info
});
return saved_row;
});
});
});
} else {
return certificate;
}
}).then((certificate) => {
data.meta = _.assign({}, data.meta || {}, certificate.meta);
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'certificate',
object_id: certificate.id,
meta: data
})
.then(() => {
return certificate;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.email]
* @param {String} [data.name]
* @return {Promise}
*/
update: (access, data) => {
return access.can('certificates:update', data.id)
.then((/*access_data*/) => {
return internalCertificate.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Certificate could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
return certificateModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then((saved_row) => {
saved_row.meta = internalCertificate.cleanMeta(saved_row.meta);
data.meta = internalCertificate.cleanMeta(data.meta);
// Add row.nice_name for custom certs
if (saved_row.provider === 'other') {
data.nice_name = saved_row.nice_name;
}
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'certificate',
object_id: row.id,
meta: _.omit(data, ['expires_on']) // this prevents json circular reference because expires_on might be raw
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('certificates:get', data.id)
.then((access_data) => {
let query = certificateModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('certificates:delete', data.id)
.then(() => {
return internalCertificate.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return certificateModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Add to audit log
row.meta = internalCertificate.cleanMeta(row.meta);
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'certificate',
object_id: row.id,
meta: _.omit(row, omissions())
});
})
.then(() => {
if (row.provider === 'letsencrypt') {
// Revoke the cert
return internalCertificate.revokeLetsEncryptSsl(row);
}
});
})
.then(() => {
return true;
});
},
/**
* All Certs
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('certificates:list')
.then((access_data) => {
let query = certificateModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner]')
.orderBy('nice_name', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('name', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
});
},
/**
* Report use
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = certificateModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
},
/**
* @param {Object} certificate
* @returns {Promise}
*/
writeCustomCert: (certificate) => {
if (debug_mode) {
logger.info('Writing Custom Certificate:', certificate);
}
let dir = '/data/custom_ssl/npm-' + certificate.id;
return new Promise((resolve, reject) => {
if (certificate.provider === 'letsencrypt') {
reject(new Error('Refusing to write letsencrypt certs here'));
return;
}
let cert_data = certificate.meta.certificate;
if (typeof certificate.meta.intermediate_certificate !== 'undefined') {
cert_data = cert_data + '\n' + certificate.meta.intermediate_certificate;
}
try {
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir);
}
} catch (err) {
reject(err);
return;
}
fs.writeFile(dir + '/fullchain.pem', cert_data, function (err) {
if (err) {
reject(err);
} else {
resolve();
}
});
})
.then(() => {
return new Promise((resolve, reject) => {
fs.writeFile(dir + '/privkey.pem', certificate.meta.certificate_key, function (err) {
if (err) {
reject(err);
} else {
resolve();
}
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Array} data.domain_names
* @param {String} data.meta.letsencrypt_email
* @param {Boolean} data.meta.letsencrypt_agree
* @returns {Promise}
*/
createQuickCertificate: (access, data) => {
return internalCertificate.create(access, {
provider: 'letsencrypt',
domain_names: data.domain_names,
meta: data.meta
});
},
/**
* Validates that the certs provided are good.
* No access required here, nothing is changed or stored.
*
* @param {Object} data
* @param {Object} data.files
* @returns {Promise}
*/
validate: (data) => {
return new Promise((resolve) => {
// Put file contents into an object
let files = {};
_.map(data.files, (file, name) => {
if (internalCertificate.allowed_ssl_files.indexOf(name) !== -1) {
files[name] = file.data.toString();
}
});
resolve(files);
})
.then((files) => {
// For each file, create a temp file and write the contents to it
// Then test it depending on the file type
let promises = [];
_.map(files, (content, type) => {
promises.push(new Promise((resolve) => {
if (type === 'certificate_key') {
resolve(internalCertificate.checkPrivateKey(content));
} else {
// this should handle `certificate` and intermediate certificate
resolve(internalCertificate.getCertificateInfo(content, true));
}
}).then((res) => {
return {[type]: res};
}));
});
return Promise.all(promises)
.then((files) => {
let data = {};
_.each(files, (file) => {
data = _.assign({}, data, file);
});
return data;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Object} data.files
* @returns {Promise}
*/
upload: (access, data) => {
return internalCertificate.get(access, {id: data.id})
.then((row) => {
if (row.provider !== 'other') {
throw new error.ValidationError('Cannot upload certificates for this type of provider');
}
return internalCertificate.validate(data)
.then((validations) => {
if (typeof validations.certificate === 'undefined') {
throw new error.ValidationError('Certificate file was not provided');
}
_.map(data.files, (file, name) => {
if (internalCertificate.allowed_ssl_files.indexOf(name) !== -1) {
row.meta[name] = file.data.toString();
}
});
// TODO: This uses a mysql only raw function that won't translate to postgres
return internalCertificate.update(access, {
id: data.id,
expires_on: certificateModel.raw('FROM_UNIXTIME(' + validations.certificate.dates.to + ')'),
domain_names: [validations.certificate.cn],
meta: _.clone(row.meta) // Prevent the update method from changing this value that we'll use later
})
.then((certificate) => {
console.log('ROWMETA:', row.meta);
certificate.meta = row.meta;
return internalCertificate.writeCustomCert(certificate);
});
})
.then(() => {
return _.pick(row.meta, internalCertificate.allowed_ssl_files);
});
});
},
/**
* Uses the openssl command to validate the private key.
* It will save the file to disk first, then run commands on it, then delete the file.
*
* @param {String} private_key This is the entire key contents as a string
*/
checkPrivateKey: (private_key) => {
return tempWrite(private_key, '/tmp')
.then((filepath) => {
return utils.exec('openssl rsa -in ' + filepath + ' -check -noout')
.then((result) => {
if (!result.toLowerCase().includes('key ok')) {
throw new error.ValidationError(result);
}
fs.unlinkSync(filepath);
return true;
}).catch((err) => {
fs.unlinkSync(filepath);
throw new error.ValidationError('Certificate Key is not valid (' + err.message + ')', err);
});
});
},
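// Usage sketch (hypothetical call): checkPrivateKey(fs.readFileSync('/tmp/key.pem', 'utf8'))
// resolves true for a valid RSA key and rejects with a ValidationError otherwise.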
/**
* Uses the openssl command to both validate and get info out of the certificate.
* It will save the file to disk first, then run commands on it, then delete the file.
*
* @param {String} certificate This is the entire cert contents as a string
* @param {Boolean} [throw_expired] Throw when the certificate is out of date
*/
getCertificateInfo: (certificate, throw_expired) => {
return tempWrite(certificate, '/tmp')
.then((filepath) => {
return internalCertificate.getCertificateInfoFromFile(filepath, throw_expired)
.then((cert_data) => {
fs.unlinkSync(filepath);
return cert_data;
}).catch((err) => {
fs.unlinkSync(filepath);
throw err;
});
});
},
/**
* Uses the openssl command to both validate and get info out of the certificate.
* It will save the file to disk first, then run commands on it, then delete the file.
*
* @param {String} certificate_file The file location on disk
* @param {Boolean} [throw_expired] Throw when the certificate is out of date
*/
getCertificateInfoFromFile: (certificate_file, throw_expired) => {
let cert_data = {};
return utils.exec('openssl x509 -in ' + certificate_file + ' -subject -noout')
.then((result) => {
// subject=CN = something.example.com
let regex = /(?:subject=)?[^=]+=\s+(\S+)/gim;
let match = regex.exec(result);
if (!match || typeof match[1] === 'undefined') {
throw new error.ValidationError('Could not determine subject from certificate: ' + result);
}
cert_data['cn'] = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -issuer -noout');
})
.then((result) => {
// issuer=C = US, O = Let's Encrypt, CN = Let's Encrypt Authority X3
let regex = /^(?:issuer=)?(.*)$/gim;
let match = regex.exec(result);
if (!match || typeof match[1] === 'undefined') {
throw new error.ValidationError('Could not determine issuer from certificate: ' + result);
}
cert_data['issuer'] = match[1];
})
.then(() => {
return utils.exec('openssl x509 -in ' + certificate_file + ' -dates -noout');
})
.then((result) => {
// notBefore=Jul 14 04:04:29 2018 GMT
// notAfter=Oct 12 04:04:29 2018 GMT
let valid_from = null;
let valid_to = null;
let lines = result.split('\n');
lines.map(function (str) {
let regex = /^(\S+)=(.*)$/gim;
let match = regex.exec(str.trim());
if (match && typeof match[2] !== 'undefined') {
let date = parseInt(moment(match[2], 'MMM DD HH:mm:ss YYYY z').format('X'), 10);
if (match[1].toLowerCase() === 'notbefore') {
valid_from = date;
} else if (match[1].toLowerCase() === 'notafter') {
valid_to = date;
}
}
});
if (!valid_from || !valid_to) {
throw new error.ValidationError('Could not determine dates from certificate: ' + result);
}
if (throw_expired && valid_to < parseInt(moment().format('X'), 10)) {
throw new error.ValidationError('Certificate has expired');
}
cert_data['dates'] = {
from: valid_from,
to: valid_to
};
return cert_data;
}).catch((err) => {
throw new error.ValidationError('Certificate is not valid (' + err.message + ')', err);
});
},
/**
* Cleans the ssl keys from the meta object and sets them to "true"
*
* @param {Object} meta
* @param {Boolean} [remove]
* @returns {Object}
*/
cleanMeta: function (meta, remove) {
internalCertificate.allowed_ssl_files.map((key) => {
if (typeof meta[key] !== 'undefined' && meta[key]) {
if (remove) {
delete meta[key];
} else {
meta[key] = true;
}
}
});
return meta;
},
/**
* @param {Object} certificate the certificate row
* @returns {Promise}
*/
requestLetsEncryptSsl: (certificate) => {
logger.info('Requesting Let\'s Encrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
let cmd = certbot_command + ' certonly --non-interactive ' +
'--config "' + le_config + '" ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--agree-tos ' +
'--email "' + certificate.meta.letsencrypt_email + '" ' +
'--preferred-challenges "dns,http" ' +
'--webroot ' +
'--domains "' + certificate.domain_names.join(',') + '" ' +
(le_staging ? '--staging' : '');
if (debug_mode) {
logger.info('Command:', cmd);
}
return utils.exec(cmd)
.then((result) => {
logger.success(result);
return result;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @returns {Promise}
*/
renew: (access, data) => {
return access.can('certificates:update', data)
.then(() => {
return internalCertificate.get(access, data);
})
.then((certificate) => {
if (certificate.provider === 'letsencrypt') {
return internalCertificate.renewLetsEncryptSsl(certificate)
.then(() => {
return internalCertificate.getCertificateInfoFromFile('/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem');
})
.then((cert_info) => {
return certificateModel
.query()
.patchAndFetchById(certificate.id, {
expires_on: certificateModel.raw('FROM_UNIXTIME(' + cert_info.dates.to + ')')
});
})
.then((updated_certificate) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'renewed',
object_type: 'certificate',
object_id: updated_certificate.id,
meta: updated_certificate
})
.then(() => {
return updated_certificate;
});
});
} else {
throw new error.ValidationError('Only Let\'s Encrypt certificates can be renewed');
}
});
},
/**
* @param {Object} certificate the certificate row
* @returns {Promise}
*/
renewLetsEncryptSsl: (certificate) => {
logger.info('Renewing Let\'s Encrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
let cmd = certbot_command + ' renew --non-interactive ' +
'--config "' + le_config + '" ' +
'--cert-name "npm-' + certificate.id + '" ' +
'--preferred-challenges "dns,http" ' +
'--disable-hook-validation ' +
(le_staging ? '--staging' : '');
if (debug_mode) {
logger.info('Command:', cmd);
}
return utils.exec(cmd)
.then((result) => {
logger.info(result);
return result;
});
},
/**
* @param {Object} certificate the certificate row
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
revokeLetsEncryptSsl: (certificate, throw_errors) => {
logger.info('Revoking Let\'s Encrypt certificates for Cert #' + certificate.id + ': ' + certificate.domain_names.join(', '));
let cmd = certbot_command + ' revoke --non-interactive ' +
'--config "' + le_config + '" ' +
'--cert-path "/etc/letsencrypt/live/npm-' + certificate.id + '/fullchain.pem" ' +
'--delete-after-revoke ' +
(le_staging ? '--staging' : '');
if (debug_mode) {
logger.info('Command:', cmd);
}
return utils.exec(cmd)
.then((result) => {
logger.info(result);
return result;
})
.catch((err) => {
if (debug_mode) {
logger.error(err.message);
}
if (throw_errors) {
throw err;
}
});
},
/**
* @param {Object} certificate
* @returns {Boolean}
*/
hasLetsEncryptSslCerts: (certificate) => {
let le_path = '/etc/letsencrypt/live/npm-' + certificate.id;
return fs.existsSync(le_path + '/fullchain.pem') && fs.existsSync(le_path + '/privkey.pem');
},
/**
* @param {Object} in_use_result
* @param {Number} in_use_result.total_count
* @param {Array} in_use_result.proxy_hosts
* @param {Array} in_use_result.redirection_hosts
* @param {Array} in_use_result.dead_hosts
*/
disableInUseHosts: (in_use_result) => {
if (in_use_result.total_count) {
let promises = [];
if (in_use_result.proxy_hosts.length) {
promises.push(internalNginx.bulkDeleteConfigs('proxy_host', in_use_result.proxy_hosts));
}
if (in_use_result.redirection_hosts.length) {
promises.push(internalNginx.bulkDeleteConfigs('redirection_host', in_use_result.redirection_hosts));
}
if (in_use_result.dead_hosts.length) {
promises.push(internalNginx.bulkDeleteConfigs('dead_host', in_use_result.dead_hosts));
}
return Promise.all(promises);
} else {
return Promise.resolve();
}
},
/**
* @param {Object} in_use_result
* @param {Number} in_use_result.total_count
* @param {Array} in_use_result.proxy_hosts
* @param {Array} in_use_result.redirection_hosts
* @param {Array} in_use_result.dead_hosts
*/
enableInUseHosts: (in_use_result) => {
if (in_use_result.total_count) {
let promises = [];
if (in_use_result.proxy_hosts.length) {
promises.push(internalNginx.bulkGenerateConfigs('proxy_host', in_use_result.proxy_hosts));
}
if (in_use_result.redirection_hosts.length) {
promises.push(internalNginx.bulkGenerateConfigs('redirection_host', in_use_result.redirection_hosts));
}
if (in_use_result.dead_hosts.length) {
promises.push(internalNginx.bulkGenerateConfigs('dead_host', in_use_result.dead_hosts));
}
return Promise.all(promises);
} else {
return Promise.resolve();
}
}
};
module.exports = internalCertificate;
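
A minimal usage sketch of the openssl-backed helpers above, from a hypothetical caller; the require path and input files are assumptions. Both helpers reject with error.ValidationError on bad input, and the `true` flag makes getCertificateInfo() also reject when the certificate has expired:

const fs = require('fs');
const internalCertificate = require('./internal/certificate'); // path assumed

const key = fs.readFileSync('/tmp/example.key', 'utf8'); // hypothetical input files
const pem = fs.readFileSync('/tmp/example.pem', 'utf8');

internalCertificate.checkPrivateKey(key)
    .then(() => internalCertificate.getCertificateInfo(pem, true))
    .then((info) => {
        // info.cn, info.issuer and info.dates.{from,to} (unix timestamps) are
        // populated by getCertificateInfoFromFile() above.
        console.log(info.cn, new Date(info.dates.to * 1000));
    })
    .catch((err) => console.error(err.message));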

backend/internal/dead-host.js Normal file

@ -0,0 +1,461 @@
const _ = require('lodash');
const error = require('../lib/error');
const deadHostModel = require('../models/dead_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
function omissions () {
return ['is_deleted'];
}
const internalDeadHost = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('dead_hosts:create', data)
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
})
.then(() => {
// At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data);
return deadHostModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalDeadHost.update(access, {
id: row.id,
certificate_id: cert.id
});
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// re-fetch with cert
return internalDeadHost.get(access, {
id: row.id,
expand: ['certificate', 'owner']
});
})
.then((row) => {
// Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row)
.then(() => {
return row;
});
})
.then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta);
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'dead-host',
object_id: row.id,
meta: data
})
.then(() => {
return row;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @return {Promise}
*/
update: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('dead_hosts:update', data.id)
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') {
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'dead', data.id));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
}
})
.then(() => {
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('404 Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta)
})
.then((cert) => {
// update host with cert id
data.certificate_id = cert.id;
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, {
domain_names: row.domain_names
}, data);
data = internalHost.cleanSslHstsData(data, row);
return deadHostModel
.query()
.where({id: data.id})
.patch(data)
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'dead-host',
object_id: row.id,
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
})
.then(() => {
return internalDeadHost.get(access, {
id: data.id,
expand: ['owner', 'certificate']
})
.then((row) => {
// Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row)
.then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('dead_hosts:get', data.id)
.then((access_data) => {
let query = deadHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner,certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('dead_hosts:delete', data.id)
.then(() => {
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return deadHostModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('dead_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'dead-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('dead_hosts:update', data.id)
.then(() => {
return internalDeadHost.get(access, {
id: data.id,
expand: ['certificate', 'owner']
});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (row.enabled) {
throw new error.ValidationError('Host is already enabled');
}
row.enabled = 1;
return deadHostModel
.query()
.where('id', row.id)
.patch({
enabled: 1
})
.then(() => {
// Configure nginx
return internalNginx.configure(deadHostModel, 'dead_host', row);
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
object_type: 'dead-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('dead_hosts:update', data.id)
.then(() => {
return internalDeadHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (!row.enabled) {
throw new error.ValidationError('Host is already disabled');
}
row.enabled = 0;
return deadHostModel
.query()
.where('id', row.id)
.patch({
enabled: 0
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('dead_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
object_type: 'dead-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* All Hosts
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('dead_hosts:list')
.then((access_data) => {
let query = deadHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('domain_names', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows);
}
return rows;
});
},
/**
* Report use
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = deadHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
};
module.exports = internalDeadHost;
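
A hedged sketch of the create flow above. All three host modules treat certificate_id === 'new' as a signal to also request a certificate via createQuickCertificate(); the wrapper function, require path and meta contents here are illustrative assumptions:

const internalDeadHost = require('./internal/dead-host'); // path assumed

function createExample404Host (access) {
    // `access` is assumed to be an Access instance with dead_hosts:create permission.
    return internalDeadHost.create(access, {
        domain_names: ['down.example.com'],
        certificate_id: 'new', // triggers internalCertificate.createQuickCertificate()
        meta: {
            letsencrypt_email: 'admin@example.com' // consumed by requestLetsEncryptSsl()
        }
    })
        .then((row) => {
            console.log('Created 404 host #' + row.id);
            return row;
        });
}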

backend/internal/host.js Normal file

@ -0,0 +1,235 @@
const _ = require('lodash');
const proxyHostModel = require('../models/proxy_host');
const redirectionHostModel = require('../models/redirection_host');
const deadHostModel = require('../models/dead_host');
const internalHost = {
/**
* Makes sure that the ssl_* and hsts_* fields play nicely together.
* i.e. if there is no cert, then force_ssl is off.
* if force_ssl is off, then hsts_enabled is definitely off.
*
* @param {object} data
* @param {object} [existing_data]
* @returns {object}
*/
cleanSslHstsData: function (data, existing_data) {
existing_data = existing_data === undefined ? {} : existing_data;
let combined_data = _.assign({}, existing_data, data);
if (!combined_data.certificate_id) {
combined_data.ssl_forced = false;
combined_data.http2_support = false;
}
if (!combined_data.ssl_forced) {
combined_data.hsts_enabled = false;
}
if (!combined_data.hsts_enabled) {
combined_data.hsts_subdomains = false;
}
return combined_data;
},
/**
* used by the getAll functions of hosts, this removes the certificate meta if present
*
* @param {Array} rows
* @returns {Array}
*/
cleanAllRowsCertificateMeta: function (rows) {
rows.map(function (row, idx) {
if (typeof rows[idx].certificate !== 'undefined' && rows[idx].certificate) {
rows[idx].certificate.meta = {};
}
});
return rows;
},
/**
* used by the get/update functions of hosts, this removes the certificate meta if present
*
* @param {Object} row
* @returns {Object}
*/
cleanRowCertificateMeta: function (row) {
if (typeof row.certificate !== 'undefined' && row.certificate) {
row.certificate.meta = {};
}
return row;
},
/**
* This returns all the host types with any domain listed in the provided domain_names array.
* This is used by the certificate logic to temporarily disable any host that is using one of the domains.
*
* @param {Array} domain_names
* @returns {Promise}
*/
getHostsWithDomains: function (domain_names) {
let promises = [
proxyHostModel
.query()
.where('is_deleted', 0),
redirectionHostModel
.query()
.where('is_deleted', 0),
deadHostModel
.query()
.where('is_deleted', 0)
];
return Promise.all(promises)
.then((promises_results) => {
let response_object = {
total_count: 0,
dead_hosts: [],
proxy_hosts: [],
redirection_hosts: []
};
if (promises_results[0]) {
// Proxy Hosts
response_object.proxy_hosts = internalHost._getHostsWithDomains(promises_results[0], domain_names);
response_object.total_count += response_object.proxy_hosts.length;
}
if (promises_results[1]) {
// Redirection Hosts
response_object.redirection_hosts = internalHost._getHostsWithDomains(promises_results[1], domain_names);
response_object.total_count += response_object.redirection_hosts.length;
}
if (promises_results[2]) {
// Dead Hosts
response_object.dead_hosts = internalHost._getHostsWithDomains(promises_results[2], domain_names);
response_object.total_count += response_object.dead_hosts.length;
}
return response_object;
});
},
/**
* Internal use only, checks to see if the domain is already taken by any other record
*
* @param {String} hostname
* @param {String} [ignore_type] 'proxy', 'redirection', 'dead'
* @param {Integer} [ignore_id] Must be supplied if type was also supplied
* @returns {Promise}
*/
isHostnameTaken: function (hostname, ignore_type, ignore_id) {
let promises = [
proxyHostModel
.query()
.where('is_deleted', 0)
.andWhere('domain_names', 'like', '%' + hostname + '%'),
redirectionHostModel
.query()
.where('is_deleted', 0)
.andWhere('domain_names', 'like', '%' + hostname + '%'),
deadHostModel
.query()
.where('is_deleted', 0)
.andWhere('domain_names', 'like', '%' + hostname + '%')
];
return Promise.all(promises)
.then((promises_results) => {
let is_taken = false;
if (promises_results[0]) {
// Proxy Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[0], ignore_type === 'proxy' && ignore_id ? ignore_id : 0)) {
is_taken = true;
}
}
if (promises_results[1]) {
// Redirection Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[1], ignore_type === 'redirection' && ignore_id ? ignore_id : 0)) {
is_taken = true;
}
}
if (promises_results[2]) {
// Dead Hosts
if (internalHost._checkHostnameRecordsTaken(hostname, promises_results[2], ignore_type === 'dead' && ignore_id ? ignore_id : 0)) {
is_taken = true;
}
}
return {
hostname: hostname,
is_taken: is_taken
};
});
},
/**
* Private call only
*
* @param {String} hostname
* @param {Array} existing_rows
* @param {Integer} [ignore_id]
* @returns {Boolean}
*/
_checkHostnameRecordsTaken: function (hostname, existing_rows, ignore_id) {
let is_taken = false;
if (existing_rows && existing_rows.length) {
existing_rows.map(function (existing_row) {
existing_row.domain_names.map(function (existing_hostname) {
// Does this domain match?
if (existing_hostname.toLowerCase() === hostname.toLowerCase()) {
if (!ignore_id || ignore_id !== existing_row.id) {
is_taken = true;
}
}
});
});
}
return is_taken;
},
/**
* Private call only
*
* @param {Array} hosts
* @param {Array} domain_names
* @returns {Array}
*/
_getHostsWithDomains: function (hosts, domain_names) {
let response = [];
if (hosts && hosts.length) {
hosts.map(function (host) {
let host_matches = false;
domain_names.map(function (domain_name) {
host.domain_names.map(function (host_domain_name) {
if (domain_name.toLowerCase() === host_domain_name.toLowerCase()) {
host_matches = true;
}
});
});
if (host_matches) {
response.push(host);
}
});
}
return response;
}
};
module.exports = internalHost;
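
A small sketch of the pattern the host modules above use with isHostnameTaken(): one check per domain, failing the whole request if any name is taken. The helper name and domain list are illustrative:

const internalHost = require('./internal/host'); // path assumed

function assertHostnamesFree (domain_names) {
    return Promise.all(domain_names.map((name) => internalHost.isHostnameTaken(name)))
        .then((results) => {
            results.forEach((result) => {
                if (result.is_taken) {
                    throw new Error(result.hostname + ' is already in use');
                }
            });
        });
}

// e.g. assertHostnamesFree(['example.com', 'www.example.com']);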

backend/internal/ip_ranges.js Normal file

@ -0,0 +1,147 @@
const https = require('https');
const fs = require('fs');
const logger = require('../logger').ip_ranges;
const error = require('../lib/error');
const internalNginx = require('./nginx');
const Liquid = require('liquidjs');
const CLOUDFRONT_URL = 'https://ip-ranges.amazonaws.com/ip-ranges.json';
const CLOUDFLARE_V4_URL = 'https://www.cloudflare.com/ips-v4';
const CLOUDFLARE_V6_URL = 'https://www.cloudflare.com/ips-v6';
const internalIpRanges = {
interval_timeout: 1000 * 60 * 60 * 6, // 6 hours
interval: null,
interval_processing: false,
iteration_count: 0,
initTimer: () => {
logger.info('IP Ranges Renewal Timer initialized');
internalIpRanges.interval = setInterval(internalIpRanges.fetch, internalIpRanges.interval_timeout);
},
fetchUrl: (url) => {
return new Promise((resolve, reject) => {
logger.info('Fetching ' + url);
return https.get(url, (res) => {
res.setEncoding('utf8');
let raw_data = '';
res.on('data', (chunk) => {
raw_data += chunk;
});
res.on('end', () => {
resolve(raw_data);
});
}).on('error', (err) => {
reject(err);
});
});
},
/**
* Triggered at startup and then later by a timer, this will fetch the ip ranges from services and apply them to nginx.
*/
fetch: () => {
if (!internalIpRanges.interval_processing) {
internalIpRanges.interval_processing = true;
logger.info('Fetching IP Ranges from online services...');
let ip_ranges = [];
return internalIpRanges.fetchUrl(CLOUDFRONT_URL)
.then((cloudfront_data) => {
let data = JSON.parse(cloudfront_data);
if (data && typeof data.prefixes !== 'undefined') {
data.prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') {
ip_ranges.push(item.ip_prefix);
}
});
}
if (data && typeof data.ipv6_prefixes !== 'undefined') {
data.ipv6_prefixes.map((item) => {
if (item.service === 'CLOUDFRONT') {
ip_ranges.push(item.ipv6_prefix);
}
});
}
})
.then(() => {
return internalIpRanges.fetchUrl(CLOUDFLARE_V4_URL);
})
.then((cloudflare_data) => {
let items = cloudflare_data.split('\n');
ip_ranges = [...ip_ranges, ...items];
})
.then(() => {
return internalIpRanges.fetchUrl(CLOUDFLARE_V6_URL);
})
.then((cloudflare_data) => {
let items = cloudflare_data.split('\n');
ip_ranges = [...ip_ranges, ...items];
})
.then(() => {
let clean_ip_ranges = [];
ip_ranges.map((range) => {
if (range) {
clean_ip_ranges.push(range);
}
});
return internalIpRanges.generateConfig(clean_ip_ranges)
.then(() => {
if (internalIpRanges.iteration_count) {
// Reload nginx
return internalNginx.reload();
}
});
})
.then(() => {
internalIpRanges.interval_processing = false;
internalIpRanges.iteration_count++;
})
.catch((err) => {
logger.error(err.message);
internalIpRanges.interval_processing = false;
});
}
},
/**
* @param {Array} ip_ranges
* @returns {Promise}
*/
generateConfig: (ip_ranges) => {
let renderEngine = Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
let filename = '/etc/nginx/conf.d/include/ip_ranges.conf';
try {
template = fs.readFileSync(__dirname + '/../templates/ip_ranges.conf', {encoding: 'utf8'});
} catch (err) {
reject(new error.ConfigurationError(err.message));
return;
}
renderEngine
.parseAndRender(template, {ip_ranges: ip_ranges})
.then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
resolve(true);
})
.catch((err) => {
logger.warn('Could not write ' + filename + ':', err.message);
reject(new error.ConfigurationError(err.message));
});
});
}
};
module.exports = internalIpRanges;
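
A startup wiring sketch, assuming the app's boot code drives this module roughly as follows; fetch() returns undefined when a run is already in flight, hence the Promise.resolve() wrapper:

const internalIpRanges = require('./internal/ip_ranges'); // path assumed

// Populate /etc/nginx/conf.d/include/ip_ranges.conf once at boot, then let the
// 6-hour interval keep it fresh; fetch() logs its own errors.
Promise.resolve(internalIpRanges.fetch())
    .then(() => internalIpRanges.initTimer());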

backend/internal/nginx.js Normal file

@ -0,0 +1,402 @@
const _ = require('lodash');
const fs = require('fs');
const Liquid = require('liquidjs');
const logger = require('../logger').nginx;
const utils = require('../lib/utils');
const error = require('../lib/error');
const debug_mode = process.env.NODE_ENV !== 'production' || !!process.env.DEBUG;
const internalNginx = {
/**
* This will:
* - test the nginx config first to make sure it's OK
* - create / recreate the config for the host
* - test again
* - IF OK: update the meta with online status
* - IF BAD: update the meta with offline status and remove the config entirely
* - then reload nginx
*
* @param {Object|String} model
* @param {String} host_type
* @param {Object} host
* @returns {Promise}
*/
configure: (model, host_type, host) => {
let combined_meta = {};
return internalNginx.test()
.then(() => {
// Nginx is OK
// We're deleting this config regardless.
return internalNginx.deleteConfig(host_type, host); // Don't throw errors, as the file may not exist at all
})
.then(() => {
return internalNginx.generateConfig(host_type, host);
})
.then(() => {
// Test nginx again and update meta with result
return internalNginx.test()
.then(() => {
// nginx is ok
combined_meta = _.assign({}, host.meta, {
nginx_online: true,
nginx_err: null
});
return model
.query()
.where('id', host.id)
.patch({
meta: combined_meta
});
})
.catch((err) => {
// Remove the error_log line because it's a docker-ism false positive that doesn't need to be reported.
// It will always look like this:
// nginx: [alert] could not open error log file: open() "/var/log/nginx/error.log" failed (6: No such device or address)
let valid_lines = [];
let err_lines = err.message.split('\n');
err_lines.map(function (line) {
if (line.indexOf('/var/log/nginx/error.log') === -1) {
valid_lines.push(line);
}
});
if (debug_mode) {
logger.error('Nginx test failed:', valid_lines.join('\n'));
}
// config is bad, update meta and delete config
combined_meta = _.assign({}, host.meta, {
nginx_online: false,
nginx_err: valid_lines.join('\n')
});
return model
.query()
.where('id', host.id)
.patch({
meta: combined_meta
})
.then(() => {
return internalNginx.deleteConfig(host_type, host, true);
});
});
})
.then(() => {
return internalNginx.reload();
})
.then(() => {
return combined_meta;
});
},
/**
* @returns {Promise}
*/
test: () => {
if (debug_mode) {
logger.info('Testing Nginx configuration');
}
return utils.exec('/usr/sbin/nginx -t -g "error_log off;"');
},
/**
* @returns {Promise}
*/
reload: () => {
return internalNginx.test()
.then(() => {
logger.info('Reloading Nginx');
return utils.exec('/usr/sbin/nginx -s reload');
});
},
/**
* @param {String} host_type
* @param {Integer} host_id
* @returns {String}
*/
getConfigName: (host_type, host_id) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
if (host_type === 'default') {
return '/data/nginx/default_host/site.conf';
}
return '/data/nginx/' + host_type + '/' + host_id + '.conf';
},
/**
* Generates custom locations
* @param {Object} host
* @returns {Promise}
*/
renderLocations: (host) => {
return new Promise((resolve, reject) => {
let template;
try {
template = fs.readFileSync(__dirname + '/../templates/_location.conf', {encoding: 'utf8'});
} catch (err) {
reject(new error.ConfigurationError(err.message));
return;
}
let renderer = new Liquid();
let renderedLocations = '';
const locationRendering = async () => {
for (let i = 0; i < host.locations.length; i++) {
let locationCopy = Object.assign({}, host.locations[i]);
if (locationCopy.forward_host.indexOf('/') > -1) {
const splitted = locationCopy.forward_host.split('/');
locationCopy.forward_host = splitted.shift();
locationCopy.forward_path = `/${splitted.join('/')}`;
}
// eslint-disable-next-line
renderedLocations += await renderer.parseAndRender(template, locationCopy);
}
};
locationRendering().then(() => resolve(renderedLocations));
});
},
/**
* @param {String} host_type
* @param {Object} host
* @returns {Promise}
*/
generateConfig: (host_type, host) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
if (debug_mode) {
logger.info('Generating ' + host_type + ' Config:', host);
}
let renderEngine = Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
let filename = internalNginx.getConfigName(host_type, host.id);
try {
template = fs.readFileSync(__dirname + '/../templates/' + host_type + '.conf', {encoding: 'utf8'});
} catch (err) {
reject(new error.ConfigurationError(err.message));
return;
}
let locationsPromise;
let origLocations;
// Manipulate the data a bit before sending it to the template
if (host_type !== 'default') {
host.use_default_location = true;
if (typeof host.advanced_config !== 'undefined' && host.advanced_config) {
host.use_default_location = !internalNginx.advancedConfigHasDefaultLocation(host.advanced_config);
}
}
if (host.locations) {
origLocations = [].concat(host.locations);
locationsPromise = internalNginx.renderLocations(host).then((renderedLocations) => {
host.locations = renderedLocations;
});
// Allow a custom location with path '/' to take precedence, and skip the default '/' location
_.map(host.locations, (location) => {
if (location.path === '/') {
host.use_default_location = false;
}
});
} else {
locationsPromise = Promise.resolve();
}
locationsPromise.then(() => {
renderEngine
.parseAndRender(template, host)
.then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
if (debug_mode) {
logger.success('Wrote config:', filename, config_text);
}
// Restore locations array
host.locations = origLocations;
resolve(true);
})
.catch((err) => {
if (debug_mode) {
logger.warn('Could not write ' + filename + ':', err.message);
}
reject(new error.ConfigurationError(err.message));
});
});
});
},
/**
* This generates a temporary nginx config listening on port 80 for the domain names listed
* in the certificate setup. It allows the Let's Encrypt ACME challenge to be answered
* when a certificate is requested for a domain that has no host set up yet.
*
* @param {Object} certificate
* @returns {Promise}
*/
generateLetsEncryptRequestConfig: (certificate) => {
if (debug_mode) {
logger.info('Generating LetsEncrypt Request Config:', certificate);
}
let renderEngine = Liquid({
root: __dirname + '/../templates/'
});
return new Promise((resolve, reject) => {
let template = null;
let filename = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
try {
template = fs.readFileSync(__dirname + '/../templates/letsencrypt-request.conf', {encoding: 'utf8'});
} catch (err) {
reject(new error.ConfigurationError(err.message));
return;
}
renderEngine
.parseAndRender(template, certificate)
.then((config_text) => {
fs.writeFileSync(filename, config_text, {encoding: 'utf8'});
if (debug_mode) {
logger.success('Wrote config:', filename, config_text);
}
resolve(true);
})
.catch((err) => {
if (debug_mode) {
logger.warn('Could not write ' + filename + ':', err.message);
}
reject(new error.ConfigurationError(err.message));
});
});
},
/**
* This removes the temporary nginx config file generated by `generateLetsEncryptRequestConfig`
*
* @param {Object} certificate
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
deleteLetsEncryptRequestConfig: (certificate, throw_errors) => {
return new Promise((resolve, reject) => {
try {
let config_file = '/data/nginx/temp/letsencrypt_' + certificate.id + '.conf';
if (debug_mode) {
logger.warn('Deleting nginx config: ' + config_file);
}
fs.unlinkSync(config_file);
} catch (err) {
if (debug_mode) {
logger.warn('Could not delete config:', err.message);
}
if (throw_errors) {
reject(err);
}
}
resolve();
});
},
/**
* @param {String} host_type
* @param {Object} [host]
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
deleteConfig: (host_type, host, throw_errors) => {
host_type = host_type.replace(new RegExp('-', 'g'), '_');
return new Promise((resolve, reject) => {
try {
let config_file = internalNginx.getConfigName(host_type, typeof host === 'undefined' ? 0 : host.id);
if (debug_mode) {
logger.warn('Deleting nginx config: ' + config_file);
}
fs.unlinkSync(config_file);
} catch (err) {
if (debug_mode) {
logger.warn('Could not delete config:', err.message);
}
if (throw_errors) {
reject(err);
}
}
resolve();
});
},
/**
* @param {String} host_type
* @param {Array} hosts
* @returns {Promise}
*/
bulkGenerateConfigs: (host_type, hosts) => {
let promises = [];
hosts.map(function (host) {
promises.push(internalNginx.generateConfig(host_type, host));
});
return Promise.all(promises);
},
/**
* @param {String} host_type
* @param {Array} hosts
* @param {Boolean} [throw_errors]
* @returns {Promise}
*/
bulkDeleteConfigs: (host_type, hosts, throw_errors) => {
let promises = [];
hosts.map(function (host) {
promises.push(internalNginx.deleteConfig(host_type, host, throw_errors));
});
return Promise.all(promises);
},
/**
* @param {string} config
* @returns {boolean}
*/
advancedConfigHasDefaultLocation: function (config) {
return !!config.match(/^(?:.*;)?\s*?location\s*?\/\s*?{/im);
}
};
module.exports = internalNginx;
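
A usage sketch for configure(), the entry point the host modules above call. It resolves with the merged meta recording whether nginx -t accepted the generated config; the wrapper and require paths are assumptions:

const internalNginx = require('./internal/nginx'); // paths assumed
const proxyHostModel = require('./models/proxy_host');

function reconfigureProxyHost (row) {
    // `row` is assumed to be a full proxy host row (id, domain_names, meta, ...).
    return internalNginx.configure(proxyHostModel, 'proxy_host', row)
        .then((meta) => {
            if (!meta.nginx_online) {
                console.error('Config rejected by nginx -t:', meta.nginx_err);
            }
            return meta;
        });
}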

backend/internal/proxy-host.js Normal file

@ -0,0 +1,462 @@
const _ = require('lodash');
const error = require('../lib/error');
const proxyHostModel = require('../models/proxy_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
function omissions () {
return ['is_deleted'];
}
const internalProxyHost = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('proxy_hosts:create', data)
.then(() => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
})
.then(() => {
// At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data);
return proxyHostModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalProxyHost.update(access, {
id: row.id,
certificate_id: cert.id
});
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// re-fetch with cert
return internalProxyHost.get(access, {
id: row.id,
expand: ['certificate', 'owner', 'access_list']
});
})
.then((row) => {
// Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
.then(() => {
return row;
});
})
.then((row) => {
// Audit log
data.meta = _.assign({}, data.meta || {}, row.meta);
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'proxy-host',
object_id: row.id,
meta: data
})
.then(() => {
return row;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @return {Promise}
*/
update: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('proxy_hosts:update', data.id)
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') {
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'proxy', data.id));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
}
})
.then(() => {
return internalProxyHost.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Proxy Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta)
})
.then((cert) => {
// update host with cert id
data.certificate_id = cert.id;
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, {
domain_names: row.domain_names
}, data);
data = internalHost.cleanSslHstsData(data, row);
return proxyHostModel
.query()
.where({id: data.id})
.patch(data)
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'proxy-host',
object_id: row.id,
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
})
.then(() => {
return internalProxyHost.get(access, {
id: data.id,
expand: ['owner', 'certificate', 'access_list']
})
.then((row) => {
// Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row)
.then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('proxy_hosts:get', data.id)
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner,access_list,certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('proxy_hosts:delete', data.id)
.then(() => {
return internalProxyHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return proxyHostModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('proxy_hosts:update', data.id)
.then(() => {
return internalProxyHost.get(access, {
id: data.id,
expand: ['certificate', 'owner', 'access_list']
});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (row.enabled) {
throw new error.ValidationError('Host is already enabled');
}
row.enabled = 1;
return proxyHostModel
.query()
.where('id', row.id)
.patch({
enabled: 1
})
.then(() => {
// Configure nginx
return internalNginx.configure(proxyHostModel, 'proxy_host', row);
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('proxy_hosts:update', data.id)
.then(() => {
return internalProxyHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (!row.enabled) {
throw new error.ValidationError('Host is already disabled');
}
row.enabled = 0;
return proxyHostModel
.query()
.where('id', row.id)
.patch({
enabled: 0
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('proxy_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
object_type: 'proxy-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* All Hosts
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('proxy_hosts:list')
.then((access_data) => {
let query = proxyHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner,access_list,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('domain_names', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows);
}
return rows;
});
},
/**
* Report use
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = proxyHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
};
module.exports = internalProxyHost;

backend/internal/redirection-host.js Normal file

@ -0,0 +1,461 @@
const _ = require('lodash');
const error = require('../lib/error');
const redirectionHostModel = require('../models/redirection_host');
const internalHost = require('./host');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
const internalCertificate = require('./certificate');
function omissions () {
return ['is_deleted'];
}
const internalRedirectionHost = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('redirection_hosts:create', data)
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
})
.then(() => {
// At this point the domains should have been checked
data.owner_user_id = access.token.getUserId(1);
data = internalHost.cleanSslHstsData(data);
return redirectionHostModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, data)
.then((cert) => {
// update host with cert id
return internalRedirectionHost.update(access, {
id: row.id,
certificate_id: cert.id
});
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// re-fetch with cert
return internalRedirectionHost.get(access, {
id: row.id,
expand: ['certificate', 'owner']
});
})
.then((row) => {
// Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
.then(() => {
return row;
});
})
.then((row) => {
data.meta = _.assign({}, data.meta || {}, row.meta);
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'redirection-host',
object_id: row.id,
meta: data
})
.then(() => {
return row;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @return {Promise}
*/
update: (access, data) => {
let create_certificate = data.certificate_id === 'new';
if (create_certificate) {
delete data.certificate_id;
}
return access.can('redirection_hosts:update', data.id)
.then((/*access_data*/) => {
// Get a list of the domain names and check each of them against existing records
let domain_name_check_promises = [];
if (typeof data.domain_names !== 'undefined') {
data.domain_names.map(function (domain_name) {
domain_name_check_promises.push(internalHost.isHostnameTaken(domain_name, 'redirection', data.id));
});
return Promise.all(domain_name_check_promises)
.then((check_results) => {
check_results.map(function (result) {
if (result.is_taken) {
throw new error.ValidationError(result.hostname + ' is already in use');
}
});
});
}
})
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Redirection Host could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
if (create_certificate) {
return internalCertificate.createQuickCertificate(access, {
domain_names: data.domain_names || row.domain_names,
meta: _.assign({}, row.meta, data.meta)
})
.then((cert) => {
// update host with cert id
data.certificate_id = cert.id;
})
.then(() => {
return row;
});
} else {
return row;
}
})
.then((row) => {
// Add domain_names to the data in case it isn't there, so that the audit log renders correctly. The order is important here.
data = _.assign({}, {
domain_names: row.domain_names
}, data);
data = internalHost.cleanSslHstsData(data, row);
return redirectionHostModel
.query()
.where({id: data.id})
.patch(data)
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'redirection-host',
object_id: row.id,
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
})
.then(() => {
return internalRedirectionHost.get(access, {
id: data.id,
expand: ['owner', 'certificate']
})
.then((row) => {
// Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row)
.then((new_meta) => {
row.meta = new_meta;
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('redirection_hosts:get', data.id)
.then((access_data) => {
let query = redirectionHostModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner,certificate]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
row = internalHost.cleanRowCertificateMeta(row);
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('redirection_hosts:delete', data.id)
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return redirectionHostModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('redirection_hosts:update', data.id)
.then(() => {
return internalRedirectionHost.get(access, {
id: data.id,
expand: ['certificate', 'owner']
});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (row.enabled) {
throw new error.ValidationError('Host is already enabled');
}
row.enabled = 1;
return redirectionHostModel
.query()
.where('id', row.id)
.patch({
enabled: 1
})
.then(() => {
// Configure nginx
return internalNginx.configure(redirectionHostModel, 'redirection_host', row);
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('redirection_hosts:update', data.id)
.then(() => {
return internalRedirectionHost.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (!row.enabled) {
throw new error.ValidationError('Host is already disabled');
}
row.enabled = 0;
return redirectionHostModel
.query()
.where('id', row.id)
.patch({
enabled: 0
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('redirection_host', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
object_type: 'redirection-host',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* All Hosts
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('redirection_hosts:list')
.then((access_data) => {
let query = redirectionHostModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner,certificate]')
.orderBy('domain_names', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('domain_names', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
})
.then((rows) => {
if (typeof expand !== 'undefined' && expand !== null && expand.indexOf('certificate') !== -1) {
return internalHost.cleanAllRowsCertificateMeta(rows);
}
return rows;
});
},
/**
* Report use
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = redirectionHostModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
};
module.exports = internalRedirectionHost;

backend/internal/report.js Normal file

@ -0,0 +1,38 @@
const internalProxyHost = require('./proxy-host');
const internalRedirectionHost = require('./redirection-host');
const internalDeadHost = require('./dead-host');
const internalStream = require('./stream');
const internalReport = {
/**
* @param {Access} access
* @return {Promise}
*/
getHostsReport: (access) => {
return access.can('reports:hosts', 1)
.then((access_data) => {
let user_id = access.token.getUserId(1);
let promises = [
internalProxyHost.getCount(user_id, access_data.visibility),
internalRedirectionHost.getCount(user_id, access_data.visibility),
internalStream.getCount(user_id, access_data.visibility),
internalDeadHost.getCount(user_id, access_data.visibility)
];
return Promise.all(promises);
})
.then((counts) => {
return {
proxy: counts.shift(),
redirection: counts.shift(),
stream: counts.shift(),
dead: counts.shift()
};
});
}
};
module.exports = internalReport;
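
For reference, a sketch of the resolved shape; the wrapper is illustrative:

const internalReport = require('./internal/report'); // path assumed

function logHostCounts (access) {
    // `access` is an assumed Access instance; resolves with one count per host type,
    // e.g. { proxy: 12, redirection: 3, stream: 1, dead: 2 }
    return internalReport.getHostsReport(access)
        .then((counts) => console.log(counts));
}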

backend/internal/setting.js Normal file

@ -0,0 +1,133 @@
const fs = require('fs');
const error = require('../lib/error');
const settingModel = require('../models/setting');
const internalNginx = require('./nginx');
const internalSetting = {
/**
* @param {Access} access
* @param {Object} data
* @param {String} data.id
* @return {Promise}
*/
update: (access, data) => {
return access.can('settings:update', data.id)
.then((/*access_data*/) => {
return internalSetting.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Setting could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
return settingModel
.query()
.where({id: data.id})
.patch(data);
})
.then(() => {
return internalSetting.get(access, {
id: data.id
});
})
.then((row) => {
if (row.id === 'default-site') {
// write the html if we need to
if (row.value === 'html') {
fs.writeFileSync('/data/nginx/default_www/index.html', row.meta.html, {encoding: 'utf8'});
}
// Configure nginx
return internalNginx.deleteConfig('default')
.then(() => {
return internalNginx.generateConfig('default', row);
})
.then(() => {
return internalNginx.test();
})
.then(() => {
return internalNginx.reload();
})
.then(() => {
return row;
})
.catch((/*err*/) => {
return internalNginx.deleteConfig('default')
.then(() => {
return internalNginx.test();
})
.then(() => {
return internalNginx.reload();
})
.then(() => {
// I'm being slack here I know..
throw new error.ValidationError('Could not reconfigure Nginx. Please check logs.');
});
});
} else {
return row;
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {String} data.id
* @return {Promise}
*/
get: (access, data) => {
return access.can('settings:get', data.id)
.then(() => {
return settingModel
.query()
.where('id', data.id)
.first();
})
.then((row) => {
if (row) {
return row;
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* This will only count the settings
*
* @param {Access} access
* @returns {*}
*/
getCount: (access) => {
return access.can('settings:list')
.then(() => {
return settingModel
.query()
.count('id as count')
.first();
})
.then((row) => {
return parseInt(row.count, 10);
});
},
/**
* All settings
*
* @param {Access} access
* @returns {Promise}
*/
getAll: (access) => {
return access.can('settings:list')
.then(() => {
return settingModel
.query()
.orderBy('description', 'ASC');
});
}
};
module.exports = internalSetting;
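A hedged usage sketch for the update flow above. `access` is assumed to be an initialised admin Access object (see lib/access.js below), and the values are placeholders.

const internalSetting = require('./internal/setting');

// Assumes `access` is an initialised admin Access object; values are placeholders
internalSetting.update(access, {
    id:    'default-site',
    value: 'html',
    meta:  {html: '<h1>Nothing to see here</h1>'}
})
    .then((row) => {
        // For 'default-site', nginx has been reconfigured and reloaded by now
        console.log('Default site mode:', row.value);
    });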

348
backend/internal/stream.js Normal file
View File

@ -0,0 +1,348 @@
const _ = require('lodash');
const error = require('../lib/error');
const streamModel = require('../models/stream');
const internalNginx = require('./nginx');
const internalAuditLog = require('./audit-log');
function omissions () {
return ['is_deleted'];
}
const internalStream = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
return access.can('streams:create', data)
.then((/*access_data*/) => {
// TODO: At this point the existing ports should have been checked
data.owner_user_id = access.token.getUserId(1);
if (typeof data.meta === 'undefined') {
data.meta = {};
}
return streamModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((row) => {
// Configure nginx
return internalNginx.configure(streamModel, 'stream', row)
.then(() => {
return internalStream.get(access, {id: row.id, expand: ['owner']});
});
})
.then((row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'stream',
object_id: row.id,
meta: data
})
.then(() => {
return row;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @return {Promise}
*/
update: (access, data) => {
return access.can('streams:update', data.id)
.then((/*access_data*/) => {
// TODO: at this point the existing streams should have been checked
return internalStream.get(access, {id: data.id});
})
.then((row) => {
if (row.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('Stream could not be updated, IDs do not match: ' + row.id + ' !== ' + data.id);
}
return streamModel
.query()
.omit(omissions())
.patchAndFetchById(row.id, data)
.then((saved_row) => {
return internalNginx.configure(streamModel, 'stream', saved_row)
.then(() => {
return internalStream.get(access, {id: row.id, expand: ['owner']});
});
})
.then((saved_row) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'stream',
object_id: row.id,
meta: data
})
.then(() => {
return _.omit(saved_row, omissions());
});
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
return access.can('streams:get', data.id)
.then((access_data) => {
let query = streamModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[owner]')
.first();
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('streams:delete', data.id)
.then(() => {
return internalStream.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
}
return streamModel
.query()
.where('id', row.id)
.patch({
is_deleted: 1
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('stream', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'stream',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
enable: (access, data) => {
return access.can('streams:update', data.id)
.then(() => {
return internalStream.get(access, {
id: data.id,
expand: ['owner']
});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (row.enabled) {
throw new error.ValidationError('Stream is already enabled');
}
row.enabled = 1;
return streamModel
.query()
.where('id', row.id)
.patch({
enabled: 1
})
.then(() => {
// Configure nginx
return internalNginx.configure(streamModel, 'stream', row);
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'enabled',
object_type: 'stream',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Number} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
disable: (access, data) => {
return access.can('streams:update', data.id)
.then(() => {
return internalStream.get(access, {id: data.id});
})
.then((row) => {
if (!row) {
throw new error.ItemNotFoundError(data.id);
} else if (!row.enabled) {
throw new error.ValidationError('Stream is already disabled');
}
row.enabled = 0;
return streamModel
.query()
.where('id', row.id)
.patch({
enabled: 0
})
.then(() => {
// Delete Nginx Config
return internalNginx.deleteConfig('stream', row)
.then(() => {
return internalNginx.reload();
});
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'disabled',
object_type: 'stream',
object_id: row.id,
meta: _.omit(row, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* All Streams
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('streams:list')
.then((access_data) => {
let query = streamModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[owner]')
.orderBy('incoming_port', 'ASC');
if (access_data.permission_visibility !== 'all') {
query.andWhere('owner_user_id', access.token.getUserId(1));
}
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('incoming_port', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
});
},
/**
* Count used by the hosts report
*
* @param {Number} user_id
* @param {String} visibility
* @returns {Promise}
*/
getCount: (user_id, visibility) => {
let query = streamModel
.query()
.count('id as count')
.where('is_deleted', 0);
if (visibility !== 'all') {
query.andWhere('owner_user_id', user_id);
}
return query.first()
.then((row) => {
return parseInt(row.count, 10);
});
}
};
module.exports = internalStream;
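A usage sketch for create, assuming `access` is an initialised Access object carrying the streams:create permission. The column names match the stream table in the initial-schema migration further below; the values are placeholders.

const internalStream = require('./internal/stream');

// Assumes `access` carries the streams:create permission; values are placeholders
internalStream.create(access, {
    incoming_port:   2222,
    forward_ip:      '192.168.1.10',
    forwarding_port: 22,
    tcp_forwarding:  1,
    udp_forwarding:  0
})
    .then((stream) => {
        // The nginx stream config was generated before this resolves
        console.log('Stream created with id', stream.id);
    });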

162
backend/internal/token.js Normal file
View File

@ -0,0 +1,162 @@
const _ = require('lodash');
const error = require('../lib/error');
const userModel = require('../models/user');
const authModel = require('../models/auth');
const helpers = require('../lib/helpers');
const TokenModel = require('../models/token');
module.exports = {
/**
* @param {Object} data
* @param {String} data.identity
* @param {String} data.secret
* @param {String} [data.scope]
* @param {String} [data.expiry]
* @param {String} [issuer]
* @returns {Promise}
*/
getTokenFromEmail: (data, issuer) => {
let Token = new TokenModel();
data.scope = data.scope || 'user';
data.expiry = data.expiry || '1d';
return userModel
.query()
.where('email', data.identity)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.first()
.then((user) => {
if (user) {
// Get auth
return authModel
.query()
.where('user_id', '=', user.id)
.where('type', '=', 'password')
.first()
.then((auth) => {
if (auth) {
return auth.verifyPassword(data.secret)
.then((valid) => {
if (valid) {
if (data.scope !== 'user' && _.indexOf(user.roles, data.scope) === -1) {
// The scope requested doesn't exist as a role against the user,
// you shall not pass.
throw new error.AuthError('Invalid scope: ' + data.scope);
}
// Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry);
if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
}
return Token.create({
iss: issuer || 'api',
attrs: {
id: user.id
},
scope: [data.scope],
expiresIn: data.expiry
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString()
};
});
} else {
throw new error.AuthError('Invalid password');
}
});
} else {
throw new error.AuthError('No password auth for user');
}
});
} else {
throw new error.AuthError('No relevant user found');
}
});
},
/**
* @param {Access} access
* @param {Object} [data]
* @param {String} [data.expiry]
* @param {String} [data.scope] Only considered if existing token scope is admin
* @returns {Promise}
*/
getFreshToken: (access, data) => {
let Token = new TokenModel();
data = data || {};
data.expiry = data.expiry || '1d';
if (access && access.token.getUserId(0)) {
// Create a moment of the expiry expression
let expiry = helpers.parseDatePeriod(data.expiry);
if (expiry === null) {
throw new error.AuthError('Invalid expiry time: ' + data.expiry);
}
let token_attrs = {
id: access.token.getUserId(0)
};
// Only admins can request otherwise scoped tokens
let scope = access.token.get('scope');
if (data.scope && access.token.hasScope('admin')) {
scope = [data.scope];
if (data.scope === 'job-board' || data.scope === 'worker') {
token_attrs.id = 0;
}
}
return Token.create({
iss: 'api',
scope: scope,
attrs: token_attrs,
expiresIn: data.expiry
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString()
};
});
} else {
throw new error.AssertionFailedError('Existing token contained invalid user data');
}
},
/**
* @param {Object} user
* @returns {Promise}
*/
getTokenFromUser: (user) => {
const expire = '1d';
const Token = new TokenModel();
const expiry = helpers.parseDatePeriod(expire);
return Token.create({
iss: 'api',
attrs: {
id: user.id
},
scope: ['user'],
expiresIn: expire
})
.then((signed) => {
return {
token: signed.token,
expires: expiry.toISOString(),
user: user
};
});
}
};
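A login sketch against getTokenFromEmail; the credentials here are placeholders.

const internalToken = require('./internal/token');

// Exchanges credentials for a signed JWT; credentials are placeholders
internalToken.getTokenFromEmail({
    identity: 'admin@example.com',
    secret:   'changeme',
    expiry:   '1d'
}, 'api')
    .then((result) => {
        // result.token is the signed JWT, result.expires an ISO-8601 timestamp
        console.log(result.token, result.expires);
    })
    .catch((err) => {
        // Bad credentials surface as an AuthError (HTTP 401, see lib/error.js)
        console.error(err.message);
    });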

518
backend/internal/user.js Normal file
View File

@ -0,0 +1,518 @@
const _ = require('lodash');
const error = require('../lib/error');
const userModel = require('../models/user');
const userPermissionModel = require('../models/user_permission');
const authModel = require('../models/auth');
const gravatar = require('gravatar');
const internalToken = require('./token');
const internalAuditLog = require('./audit-log');
function omissions () {
return ['is_deleted'];
}
const internalUser = {
/**
* @param {Access} access
* @param {Object} data
* @returns {Promise}
*/
create: (access, data) => {
let auth = data.auth || null;
delete data.auth;
data.avatar = data.avatar || '';
data.roles = data.roles || [];
if (typeof data.is_disabled !== 'undefined') {
data.is_disabled = data.is_disabled ? 1 : 0;
}
return access.can('users:create', data)
.then(() => {
data.avatar = gravatar.url(data.email, {default: 'mm'});
return userModel
.query()
.omit(omissions())
.insertAndFetch(data);
})
.then((user) => {
if (auth) {
return authModel
.query()
.insert({
user_id: user.id,
type: auth.type,
secret: auth.secret,
meta: {}
})
.then(() => {
return user;
});
} else {
return user;
}
})
.then((user) => {
// Create permissions row as well
let is_admin = data.roles.indexOf('admin') !== -1;
return userPermissionModel
.query()
.insert({
user_id: user.id,
visibility: is_admin ? 'all' : 'user',
proxy_hosts: 'manage',
redirection_hosts: 'manage',
dead_hosts: 'manage',
streams: 'manage',
access_lists: 'manage',
certificates: 'manage'
})
.then(() => {
return internalUser.get(access, {id: user.id, expand: ['permissions']});
});
})
.then((user) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'created',
object_type: 'user',
object_id: user.id,
meta: user
})
.then(() => {
return user;
});
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {String} [data.email]
* @param {String} [data.name]
* @return {Promise}
*/
update: (access, data) => {
if (typeof data.is_disabled !== 'undefined') {
data.is_disabled = data.is_disabled ? 1 : 0;
}
return access.can('users:update', data.id)
.then(() => {
// Make sure the user being updated isn't changing their email to one that another user is already using
// 1. get user we want to update
return internalUser.get(access, {id: data.id})
.then((user) => {
// 2. if email is to be changed, find other users with that email
if (typeof data.email !== 'undefined') {
data.email = data.email.toLowerCase().trim();
if (user.email !== data.email) {
return internalUser.isEmailAvailable(data.email, data.id)
.then((available) => {
if (!available) {
throw new error.ValidationError('Email address already in use - ' + data.email);
}
return user;
});
}
}
// No change to email:
return user;
});
})
.then((user) => {
if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
}
data.avatar = gravatar.url(data.email || user.email, {default: 'mm'});
return userModel
.query()
.omit(omissions())
.patchAndFetchById(user.id, data)
.then((saved_user) => {
return _.omit(saved_user, omissions());
});
})
.then(() => {
return internalUser.get(access, {id: data.id});
})
.then((user) => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'user',
object_id: user.id,
meta: data
})
.then(() => {
return user;
});
});
},
/**
* @param {Access} access
* @param {Object} [data]
* @param {Integer} [data.id] Defaults to the token user
* @param {Array} [data.expand]
* @param {Array} [data.omit]
* @return {Promise}
*/
get: (access, data) => {
if (typeof data === 'undefined') {
data = {};
}
if (typeof data.id === 'undefined' || !data.id) {
data.id = access.token.getUserId(0);
}
return access.can('users:get', data.id)
.then(() => {
let query = userModel
.query()
.where('is_deleted', 0)
.andWhere('id', data.id)
.allowEager('[permissions]')
.first();
// Custom omissions
if (typeof data.omit !== 'undefined' && data.omit !== null) {
query.omit(data.omit);
}
if (typeof data.expand !== 'undefined' && data.expand !== null) {
query.eager('[' + data.expand.join(', ') + ']');
}
return query;
})
.then((row) => {
if (row) {
return _.omit(row, omissions());
} else {
throw new error.ItemNotFoundError(data.id);
}
});
},
/**
* Checks if an email address is available for use. If a user_id is supplied, that user is
* excluded from the check, so a user keeping their own email address still passes.
*
* @param   {String}  email
* @param   {Integer} [user_id]
* @returns {Promise}
*/
isEmailAvailable: (email, user_id) => {
let query = userModel
.query()
.where('email', '=', email.toLowerCase().trim())
.where('is_deleted', 0)
.first();
if (typeof user_id !== 'undefined') {
query.where('id', '!=', user_id);
}
return query
.then((user) => {
return !user;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {String} [data.reason]
* @returns {Promise}
*/
delete: (access, data) => {
return access.can('users:delete', data.id)
.then(() => {
return internalUser.get(access, {id: data.id});
})
.then((user) => {
if (!user) {
throw new error.ItemNotFoundError(data.id);
}
// Make sure user can't delete themselves
if (user.id === access.token.getUserId(0)) {
throw new error.PermissionError('You cannot delete yourself.');
}
return userModel
.query()
.where('id', user.id)
.patch({
is_deleted: 1
})
.then(() => {
// Add to audit log
return internalAuditLog.add(access, {
action: 'deleted',
object_type: 'user',
object_id: user.id,
meta: _.omit(user, omissions())
});
});
})
.then(() => {
return true;
});
},
/**
* This will only count the users
*
* @param {Access} access
* @param {String} [search_query]
* @returns {*}
*/
getCount: (access, search_query) => {
return access.can('users:list')
.then(() => {
let query = userModel
.query()
.count('id as count')
.where('is_deleted', 0)
.first();
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('user.name', 'like', '%' + search_query + '%')
.orWhere('user.email', 'like', '%' + search_query + '%');
});
}
return query;
})
.then((row) => {
return parseInt(row.count, 10);
});
},
/**
* All users
*
* @param {Access} access
* @param {Array} [expand]
* @param {String} [search_query]
* @returns {Promise}
*/
getAll: (access, expand, search_query) => {
return access.can('users:list')
.then(() => {
let query = userModel
.query()
.where('is_deleted', 0)
.groupBy('id')
.omit(['is_deleted'])
.allowEager('[permissions]')
.orderBy('name', 'ASC');
// Query is used for searching
if (typeof search_query === 'string') {
query.where(function () {
this.where('name', 'like', '%' + search_query + '%')
.orWhere('email', 'like', '%' + search_query + '%');
});
}
if (typeof expand !== 'undefined' && expand !== null) {
query.eager('[' + expand.join(', ') + ']');
}
return query;
});
},
/**
* @param {Access} access
* @param {Integer} [id_requested]
* @returns {[String]}
*/
getUserOmisionsByAccess: (access, id_requested) => {
let response = []; // Admin response
if (!access.token.hasScope('admin') && access.token.getUserId(0) !== id_requested) {
response = ['roles', 'is_deleted']; // Restricted response
}
return response;
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
* @param {String} data.type
* @param {String} data.secret
* @return {Promise}
*/
setPassword: (access, data) => {
return access.can('users:password', data.id)
.then(() => {
return internalUser.get(access, {id: data.id});
})
.then((user) => {
if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
}
if (user.id === access.token.getUserId(0)) {
// they're setting their own password. Make sure their current password is correct
if (typeof data.current === 'undefined' || !data.current) {
throw new error.ValidationError('Current password was not supplied');
}
return internalToken.getTokenFromEmail({
identity: user.email,
secret: data.current
})
.then(() => {
return user;
});
}
return user;
})
.then((user) => {
// Get auth, patch if it exists
return authModel
.query()
.where('user_id', user.id)
.andWhere('type', data.type)
.first()
.then((existing_auth) => {
if (existing_auth) {
// patch
return authModel
.query()
.where('user_id', user.id)
.andWhere('type', data.type)
.patch({
type: data.type, // This is required for the model to encrypt on save
secret: data.secret
});
} else {
// insert
return authModel
.query()
.insert({
user_id: user.id,
type: data.type,
secret: data.secret,
meta: {}
});
}
})
.then(() => {
// Add to Audit Log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'user',
object_id: user.id,
meta: {
name: user.name,
password_changed: true,
auth_type: data.type
}
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @return {Promise}
*/
setPermissions: (access, data) => {
return access.can('users:permissions', data.id)
.then(() => {
return internalUser.get(access, {id: data.id});
})
.then((user) => {
if (user.id !== data.id) {
// Sanity check that something crazy hasn't happened
throw new error.InternalValidationError('User could not be updated, IDs do not match: ' + user.id + ' !== ' + data.id);
}
return user;
})
.then((user) => {
// Get perms row, patch if it exists
return userPermissionModel
.query()
.where('user_id', user.id)
.first()
.then((existing_auth) => {
if (existing_auth) {
// patch
return userPermissionModel
.query()
.where('user_id', user.id)
.patchAndFetchById(existing_auth.id, _.assign({user_id: user.id}, data));
} else {
// insert
return userPermissionModel
.query()
.insertAndFetch(_.assign({user_id: user.id}, data));
}
})
.then((permissions) => {
// Add to Audit Log
return internalAuditLog.add(access, {
action: 'updated',
object_type: 'user',
object_id: user.id,
meta: {
name: user.name,
permissions: permissions
}
});
});
})
.then(() => {
return true;
});
},
/**
* @param {Access} access
* @param {Object} data
* @param {Integer} data.id
*/
loginAs: (access, data) => {
return access.can('users:loginas', data.id)
.then(() => {
return internalUser.get(access, data);
})
.then((user) => {
return internalToken.getTokenFromUser(user);
});
}
};
module.exports = internalUser;
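A creation sketch. `access` is assumed to be an initialised admin Access object, and the field values are placeholders; nickname is required by the user table in the initial-schema migration below.

const internalUser = require('./internal/user');

// Assumes `access` is an initialised admin Access object; values are placeholders
internalUser.create(access, {
    email:    'jane@example.com',
    name:     'Jane Doe',
    nickname: 'Jane',
    roles:    ['admin'],
    auth:     {type: 'password', secret: 'supersecret'}
})
    .then((user) => {
        // A user_permission row was created alongside, with 'all' visibility
        // because of the admin role
        console.log('Created user', user.id);
    });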

19
backend/knexfile.js Normal file
View File

@ -0,0 +1,19 @@
module.exports = {
development: {
client: 'mysql',
migrations: {
tableName: 'migrations',
stub: 'lib/migrate_template.js',
directory: 'migrations'
}
},
production: {
client: 'mysql',
migrations: {
tableName: 'migrations',
stub: 'lib/migrate_template.js',
directory: 'migrations'
}
}
};

314
backend/lib/access.js Normal file
View File

@ -0,0 +1,314 @@
/**
 * Some Notes: This is a friggin complicated piece of code.
 *
 * "scope" in this file means "where did this token come from and what is using it", so 99% of the time
 * the "scope" is going to be "user" because it would be a user token. This is not to be confused with
 * the "role", which could be "user" or "admin". The scope, in fact, could be "worker" or anything else.
 */
const _ = require('lodash');
const logger = require('../logger').access;
const validator = require('ajv');
const error = require('./error');
const userModel = require('../models/user');
const proxyHostModel = require('../models/proxy_host');
const TokenModel = require('../models/token');
const roleSchema = require('./access/roles.json');
const permsSchema = require('./access/permissions.json');
module.exports = function (token_string) {
let Token = new TokenModel();
let token_data = null;
let initialised = false;
let object_cache = {};
let allow_internal_access = false;
let user_roles = [];
let permissions = {};
/**
* Loads the Token object from the token string
*
* @returns {Promise}
*/
this.init = () => {
return new Promise((resolve, reject) => {
if (initialised) {
resolve();
} else if (!token_string) {
reject(new error.PermissionError('Permission Denied'));
} else {
resolve(Token.load(token_string)
.then((data) => {
token_data = data;
// At this point we need to load the user from the DB and make sure they:
// - exist (and not soft deleted)
// - still have the appropriate scopes for this token
// This is only required when the User ID is supplied or if the token scope has `user`
if (token_data.attrs.id || (typeof token_data.scope !== 'undefined' && _.indexOf(token_data.scope, 'user') !== -1)) {
// Has token user id or token user scope
return userModel
.query()
.where('id', token_data.attrs.id)
.andWhere('is_deleted', 0)
.andWhere('is_disabled', 0)
.allowEager('[permissions]')
.eager('[permissions]')
.first()
.then((user) => {
if (user) {
// make sure user has all scopes of the token
// The `user` role is not added against the user row, so we have to just add it here to get past this check.
user.roles.push('user');
let is_ok = true;
_.forEach(token_data.scope, (scope_item) => {
if (_.indexOf(user.roles, scope_item) === -1) {
is_ok = false;
}
});
if (!is_ok) {
throw new error.AuthError('Invalid token scope for User');
} else {
initialised = true;
user_roles = user.roles;
permissions = user.permissions;
}
} else {
throw new error.AuthError('User cannot be loaded for Token');
}
});
} else {
initialised = true;
}
}));
}
});
};
/**
* Fetches the object ids from the database, only once per object type, for this token.
* This only applies to USER token scopes, as all other tokens are not really bound
* by object scopes
*
* @param {String} object_type
* @returns {Promise}
*/
this.loadObjects = (object_type) => {
return new Promise((resolve, reject) => {
if (Token.hasScope('user')) {
if (typeof token_data.attrs.id === 'undefined' || !token_data.attrs.id) {
reject(new error.AuthError('User Token supplied without a User ID'));
} else {
let token_user_id = token_data.attrs.id ? token_data.attrs.id : 0;
let query;
if (typeof object_cache[object_type] === 'undefined') {
switch (object_type) {
// USERS - should only return yourself
case 'users':
resolve(token_user_id ? [token_user_id] : []);
break;
// Proxy Hosts
case 'proxy_hosts':
query = proxyHostModel
.query()
.select('id')
.andWhere('is_deleted', 0);
if (permissions.visibility === 'user') {
query.andWhere('owner_user_id', token_user_id);
}
resolve(query
.then((rows) => {
let result = [];
_.forEach(rows, (rule_row) => {
result.push(rule_row.id);
});
// a JSON-schema enum must contain at least one item, so fall back to 0
if (!result.length) {
result.push(0);
}
return result;
})
);
break;
// DEFAULT: null
default:
resolve(null);
break;
}
} else {
resolve(object_cache[object_type]);
}
}
} else {
resolve(null);
}
})
.then((objects) => {
object_cache[object_type] = objects;
return objects;
});
};
/**
* Creates a schema object on the fly with the IDs and other values required to be checked against the permissionSchema
*
* @param {String} permission_label
* @returns {Object}
*/
this.getObjectSchema = (permission_label) => {
let base_object_type = permission_label.split(':').shift();
let schema = {
$id: 'objects',
$schema: 'http://json-schema.org/draft-07/schema#',
description: 'Actor Properties',
type: 'object',
additionalProperties: false,
properties: {
user_id: {
anyOf: [
{
type: 'number',
enum: [Token.get('attrs').id]
}
]
},
scope: {
type: 'string',
pattern: '^' + Token.get('scope') + '$'
}
}
};
return this.loadObjects(base_object_type)
.then((object_result) => {
if (typeof object_result === 'object' && object_result !== null) {
schema.properties[base_object_type] = {
type: 'number',
enum: object_result,
minimum: 1
};
} else {
schema.properties[base_object_type] = {
type: 'number',
minimum: 1
};
}
return schema;
});
};
return {
token: Token,
/**
*
* @param {Boolean} [allow_internal]
* @returns {Promise}
*/
load: (allow_internal) => {
return new Promise(function (resolve/*, reject*/) {
if (token_string) {
resolve(Token.load(token_string));
} else {
allow_internal_access = allow_internal;
resolve(allow_internal_access || null);
}
});
},
reloadObjects: this.loadObjects,
/**
*
* @param {String} permission
* @param {*} [data]
* @returns {Promise}
*/
can: (permission, data) => {
if (allow_internal_access === true) {
return Promise.resolve(true);
} else {
return this.init()
.then(() => {
// Initialised, token decoded ok
return this.getObjectSchema(permission)
.then((objectSchema) => {
let data_schema = {
[permission]: {
data: data,
scope: Token.get('scope'),
roles: user_roles,
permission_visibility: permissions.visibility,
permission_proxy_hosts: permissions.proxy_hosts,
permission_redirection_hosts: permissions.redirection_hosts,
permission_dead_hosts: permissions.dead_hosts,
permission_streams: permissions.streams,
permission_access_lists: permissions.access_lists,
permission_certificates: permissions.certificates
}
};
let permissionSchema = {
$schema: 'http://json-schema.org/draft-07/schema#',
$async: true,
$id: 'permissions',
additionalProperties: false,
properties: {}
};
permissionSchema.properties[permission] = require('./access/' + permission.replace(/:/gim, '-') + '.json');
// logger.info('objectSchema', JSON.stringify(objectSchema, null, 2));
// logger.info('permissionSchema', JSON.stringify(permissionSchema, null, 2));
// logger.info('data_schema', JSON.stringify(data_schema, null, 2));
let ajv = validator({
verbose: true,
allErrors: true,
format: 'full',
missingRefs: 'fail',
breakOnError: true,
coerceTypes: true,
schemas: [
roleSchema,
permsSchema,
objectSchema,
permissionSchema
]
});
return ajv.validate('permissions', data_schema)
.then(() => {
return data_schema[permission];
});
});
})
.catch((err) => {
err.permission = permission;
err.permission_data = data;
logger.error(permission, data, err.message);
throw new error.PermissionError('Permission Denied', err);
});
}
}
};
};
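A condensed sketch of what the jwt/access middleware below do with this module; `token_string` would come from the Authorization header and is assumed here.

const Access = require('./lib/access');

// token_string would come from the Authorization header (see the middleware below)
let access = new Access(token_string);
access.load()
    .then(() => {
        // Resolves with the matched permission data, or throws a
        // PermissionError (403) if the token fails the schema for this label
        return access.can('proxy_hosts:list');
    })
    .then((access_data) => {
        console.log('visibility:', access_data.permission_visibility);
    });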

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_access_lists", "roles"],
"properties": {
"permission_access_lists": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_access_lists", "roles"],
"properties": {
"permission_access_lists": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_access_lists", "roles"],
"properties": {
"permission_access_lists": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_access_lists", "roles"],
"properties": {
"permission_access_lists": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_access_lists", "roles"],
"properties": {
"permission_access_lists": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_certificates", "roles"],
"properties": {
"permission_certificates": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_certificates", "roles"],
"properties": {
"permission_certificates": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_certificates", "roles"],
"properties": {
"permission_certificates": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_certificates", "roles"],
"properties": {
"permission_certificates": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_certificates", "roles"],
"properties": {
"permission_certificates": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_dead_hosts", "roles"],
"properties": {
"permission_dead_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_dead_hosts", "roles"],
"properties": {
"permission_dead_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_dead_hosts", "roles"],
"properties": {
"permission_dead_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_dead_hosts", "roles"],
"properties": {
"permission_dead_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_dead_hosts", "roles"],
"properties": {
"permission_dead_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,14 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "perms",
"definitions": {
"view": {
"type": "string",
"pattern": "^(view|manage)$"
},
"manage": {
"type": "string",
"pattern": "^(manage)$"
}
}
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_proxy_hosts", "roles"],
"properties": {
"permission_proxy_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_proxy_hosts", "roles"],
"properties": {
"permission_proxy_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_proxy_hosts", "roles"],
"properties": {
"permission_proxy_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_proxy_hosts", "roles"],
"properties": {
"permission_proxy_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_proxy_hosts", "roles"],
"properties": {
"permission_proxy_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_redirection_hosts", "roles"],
"properties": {
"permission_redirection_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_redirection_hosts", "roles"],
"properties": {
"permission_redirection_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_redirection_hosts", "roles"],
"properties": {
"permission_redirection_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_redirection_hosts", "roles"],
"properties": {
"permission_redirection_hosts": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_redirection_hosts", "roles"],
"properties": {
"permission_redirection_hosts": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/user"
}
]
}

View File

@ -0,0 +1,39 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "roles",
"definitions": {
"admin": {
"type": "object",
"required": ["scope", "roles"],
"properties": {
"scope": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^user$"
}
},
"roles": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^admin$"
}
}
}
},
"user": {
"type": "object",
"required": ["scope"],
"properties": {
"scope": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^user$"
}
}
}
}
}
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_streams", "roles"],
"properties": {
"permission_streams": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_streams", "roles"],
"properties": {
"permission_streams": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_streams", "roles"],
"properties": {
"permission_streams": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_streams", "roles"],
"properties": {
"permission_streams": {
"$ref": "perms#/definitions/view"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["permission_streams", "roles"],
"properties": {
"permission_streams": {
"$ref": "perms#/definitions/manage"
},
"roles": {
"type": "array",
"items": {
"type": "string",
"enum": ["user"]
}
}
}
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["data", "scope"],
"properties": {
"data": {
"$ref": "objects#/properties/users"
},
"scope": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^user$"
}
}
}
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["data", "scope"],
"properties": {
"data": {
"$ref": "objects#/properties/users"
},
"scope": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^user$"
}
}
}
}
]
}

View File

@ -0,0 +1,7 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
}
]
}

View File

@ -0,0 +1,23 @@
{
"anyOf": [
{
"$ref": "roles#/definitions/admin"
},
{
"type": "object",
"required": ["data", "scope"],
"properties": {
"data": {
"$ref": "objects#/properties/users"
},
"scope": {
"type": "array",
"contains": {
"type": "string",
"pattern": "^user$"
}
}
}
}
]
}

90
backend/lib/error.js Normal file
View File

@ -0,0 +1,90 @@
const _ = require('lodash');
const util = require('util');
module.exports = {
PermissionError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = 'Permission Denied';
this.public = true;
this.status = 403;
},
ItemNotFoundError: function (id, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = 'Item Not Found - ' + id;
this.public = true;
this.status = 404;
},
AuthError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = true;
this.status = 401;
},
InternalError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 500;
this.public = false;
},
InternalValidationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 400;
this.public = false;
},
ConfigurationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.status = 400;
this.public = true;
},
CacheError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.message = message;
this.previous = previous;
this.status = 500;
this.public = false;
},
ValidationError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = true;
this.status = 400;
},
AssertionFailedError: function (message, previous) {
Error.captureStackTrace(this, this.constructor);
this.name = this.constructor.name;
this.previous = previous;
this.message = message;
this.public = false;
this.status = 400;
}
};
_.forEach(module.exports, function (error) {
util.inherits(error, Error);
});
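A quick sketch of what these error objects carry, derived from the constructors above.

const error = require('./lib/error');

// Each error carries an HTTP status and a `public` flag for the API responder
try {
    throw new error.ItemNotFoundError(123);
} catch (err) {
    console.log(err.message); // 'Item Not Found - 123'
    console.log(err.status);  // 404
    console.log(err.public);  // true
}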

View File

@ -0,0 +1,30 @@
const validator = require('../validator');
module.exports = function (req, res, next) {
if (req.headers.origin) {
// very relaxed validation....
validator({
type: 'string',
pattern: '^[a-z\\-]+:\\/\\/(?:[\\w\\-\\.]+(:[0-9]+)?/?)?$'
}, req.headers.origin)
.then(function () {
res.set({
'Access-Control-Allow-Origin': req.headers.origin,
'Access-Control-Allow-Credentials': true,
'Access-Control-Allow-Methods': 'OPTIONS, GET, POST',
'Access-Control-Allow-Headers': 'Content-Type, Cache-Control, Pragma, Expires, Authorization, X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit',
'Access-Control-Max-Age': 5 * 60,
'Access-Control-Expose-Headers': 'X-Dataset-Total, X-Dataset-Offset, X-Dataset-Limit'
});
next();
})
.catch(next);
} else {
// No origin
next();
}
};

View File

@ -0,0 +1,15 @@
const Access = require('../access');
module.exports = () => {
return function (req, res, next) {
res.locals.access = null;
let access = new Access(res.locals.token || null);
access.load()
.then(() => {
res.locals.access = access;
next();
})
.catch(next);
};
};

View File

@ -0,0 +1,13 @@
module.exports = function () {
return function (req, res, next) {
if (req.headers.authorization) {
let parts = req.headers.authorization.split(' ');
if (parts && parts[0] === 'Bearer' && parts[1]) {
res.locals.token = parts[1];
}
}
next();
};
};

View File

@ -0,0 +1,55 @@
const _ = require('lodash');
module.exports = function (default_sort, default_offset, default_limit, max_limit) {
/**
* This will set up the req query params with filtered data and defaults
*
* sort will be an array of fields and their direction
* offset will be an int, defaulting to zero if no other default supplied
* limit will be an int, defaulting to 50 if no other default supplied, and limited to the max if that was supplied
*
*/
return function (req, res, next) {
req.query.offset = typeof req.query.offset === 'undefined' ? default_offset || 0 : parseInt(req.query.offset, 10);
req.query.limit = typeof req.query.limit === 'undefined' ? default_limit || 50 : parseInt(req.query.limit, 10);
if (max_limit && req.query.limit > max_limit) {
req.query.limit = max_limit;
}
// Sorting
let sort = typeof req.query.sort === 'undefined' ? default_sort : req.query.sort;
let myRegexp = /.*\.(asc|desc)$/i; // no global flag: exec() with /g keeps lastIndex between calls and would skip alternate sort fields
let sort_array = [];
sort = sort.split(',');
_.map(sort, function (val) {
let matches = myRegexp.exec(val);
if (matches !== null) {
let dir = matches[1];
sort_array.push({
field: val.substr(0, val.length - (dir.length + 1)),
dir: dir.toLowerCase()
});
} else {
sort_array.push({
field: val,
dir: 'asc'
});
}
});
// Sort will now be in this format:
// [
// { field: 'field1', dir: 'asc' },
// { field: 'field2', dir: 'desc' }
// ]
req.query.sort = sort_array;
next();
};
};
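A sketch of the middleware in use. The route, handler name and require path are assumptions, since the file name is not shown in this hunk.

// Route and require path are assumptions; the file name is not shown in this hunk
const pagination = require('./lib/express/pagination');

router.get('/users', pagination('name', 0, 50, 300), listUsersHandler);

// For GET /api/users?sort=name.desc,id&limit=10 the handler then sees:
// req.query.offset = 0
// req.query.limit  = 10
// req.query.sort   = [{field: 'name', dir: 'desc'}, {field: 'id', dir: 'asc'}]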

View File

@ -0,0 +1,9 @@
module.exports = (req, res, next) => {
if (req.params.user_id === 'me' && res.locals.access) {
req.params.user_id = res.locals.access.token.get('attrs').id;
} else {
req.params.user_id = parseInt(req.params.user_id, 10);
}
next();
};
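Taken together, the middleware above compose roughly like this. The identifier names are illustrative, as the middleware file names are not shown in these hunks.

const express = require('express');
const app     = express();

// Identifier names are illustrative; the middleware file names are not shown here
app.use(cors);         // sets CORS headers when an Origin header is present
app.use(extractJwt()); // copies the bearer token into res.locals.token
app.use(loadAccess()); // builds res.locals.access from that token

// Route-level: lets an authenticated user request /users/me
app.get('/users/:user_id', resolveUserId, (req, res) => {
    res.send({id: req.params.user_id});
});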

32
backend/lib/helpers.js Normal file
View File

@ -0,0 +1,32 @@
const moment = require('moment');
module.exports = {
/**
* Takes an expression such as 30d and returns a moment object of that date in the future
*
* Key Shorthand
* ==================
* years y
* quarters Q
* months M
* weeks w
* days d
* hours h
* minutes m
* seconds s
* milliseconds ms
*
* @param {String} expression
* @returns {Object}
*/
parseDatePeriod: function (expression) {
let matches = expression.match(/^([0-9]+)(y|Q|M|w|d|h|m|s|ms)$/m);
if (matches) {
return moment().add(matches[1], matches[2]);
}
return null;
}
};
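Example behaviour of parseDatePeriod, following the shorthand table above.

const helpers = require('./lib/helpers');

helpers.parseDatePeriod('30d');     // moment object 30 days from now
helpers.parseDatePeriod('2h');      // moment object 2 hours from now
helpers.parseDatePeriod('banana');  // null - doesn't match the pattern

let expiry = helpers.parseDatePeriod('1d');
if (expiry !== null) {
    console.log(expiry.toISOString());
}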

View File

@ -0,0 +1,55 @@
const migrate_name = 'identifier_for_migrate';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex, Promise) {
logger.info('[' + migrate_name + '] Migrating Up...');
// Create Table example:
/*return knex.schema.createTable('notification', (table) => {
table.increments().primary();
table.string('name').notNull();
table.string('type').notNull();
table.integer('created_on').notNull();
table.integer('modified_on').notNull();
})
.then(function () {
logger.info('[' + migrate_name + '] Notification Table created');
});*/
logger.info('[' + migrate_name + '] Migrating Up Complete');
return Promise.resolve(true);
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.info('[' + migrate_name + '] Migrating Down...');
// Drop table example:
/*return knex.schema.dropTable('notification')
.then(() => {
logger.info('[' + migrate_name + '] Notification Table dropped');
});*/
logger.info('[' + migrate_name + '] Migrating Down Complete');
return Promise.resolve(true);
};

20
backend/lib/utils.js Normal file
View File

@ -0,0 +1,20 @@
const exec = require('child_process').exec;
module.exports = {
/**
* @param {String} cmd
* @returns {Promise}
*/
exec: function (cmd) {
return new Promise((resolve, reject) => {
exec(cmd, function (err, stdout/*, stderr*/) {
if (err && typeof err === 'object') {
reject(err);
} else {
resolve(stdout.trim());
}
});
});
}
};
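Usage sketch: the promise resolves with trimmed stdout and rejects with the child_process error.

const utils = require('./lib/utils');

utils.exec('echo hello')
    .then((stdout) => {
        console.log(stdout); // 'hello'
    })
    .catch((err) => {
        console.error('command failed:', err.message);
    });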

View File

@ -0,0 +1,45 @@
const error = require('../error');
const path = require('path');
const parser = require('json-schema-ref-parser');
const ajv = require('ajv')({
verbose: true,
validateSchema: true,
allErrors: false,
format: 'full',
coerceTypes: true
});
/**
* @param {Object} schema
* @param {Object} payload
* @returns {Promise}
*/
function apiValidator (schema, payload/*, description*/) {
return new Promise(function Promise_apiValidator (resolve, reject) {
if (typeof payload === 'undefined') {
    return reject(new error.ValidationError('Payload is undefined'));
}
let validate = ajv.compile(schema);
let valid = validate(payload);
if (valid && !validate.errors) {
resolve(payload);
} else {
let message = ajv.errorsText(validate.errors);
let err = new error.ValidationError(message);
err.debug = [validate.errors, payload];
reject(err);
}
});
}
apiValidator.loadSchemas = parser
.dereference(path.resolve('schema/index.json'))
.then((schema) => {
ajv.addSchema(schema);
return schema;
});
module.exports = apiValidator;
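A sketch of the intended call order. The require path is an assumption, since the file name is not shown in this hunk; the inline schema is a trivial placeholder.

const apiValidator = require('./lib/validator/api'); // path is an assumption

// Schemas are dereferenced once, up front, before the validator is used
apiValidator.loadSchemas
    .then(() => {
        return apiValidator({type: 'object', required: ['name']}, {name: 'test'});
    })
    .then((payload) => {
        // Resolves with the payload on success; on failure it rejects with a
        // ValidationError whose .debug is [validate.errors, payload]
        console.log('valid:', payload);
    });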

View File

@ -0,0 +1,49 @@
const _ = require('lodash');
const error = require('../error');
const definitions = require('../../schema/definitions.json');
RegExp.prototype.toJSON = RegExp.prototype.toString;
const ajv = require('ajv')({
verbose: true, //process.env.NODE_ENV === 'development',
allErrors: true,
format: 'full', // strict regexes for format checks
coerceTypes: true,
schemas: [
definitions
]
});
/**
*
* @param {Object} schema
* @param {Object} payload
* @returns {Promise}
*/
function validator (schema, payload) {
return new Promise(function (resolve, reject) {
if (!payload) {
reject(new error.InternalValidationError('Payload is falsy'));
} else {
try {
let validate = ajv.compile(schema);
let valid = validate(payload);
if (valid && !validate.errors) {
resolve(_.cloneDeep(payload));
} else {
let message = ajv.errorsText(validate.errors);
reject(new error.InternalValidationError(message));
}
} catch (err) {
reject(err);
}
}
});
}
module.exports = validator;

13
backend/logger.js Normal file
View File

@ -0,0 +1,13 @@
const {Signale} = require('signale');
module.exports = {
global: new Signale({scope: 'Global '}),
migrate: new Signale({scope: 'Migrate '}),
express: new Signale({scope: 'Express '}),
access: new Signale({scope: 'Access '}),
nginx: new Signale({scope: 'Nginx '}),
ssl: new Signale({scope: 'SSL '}),
import: new Signale({scope: 'Importer '}),
setup: new Signale({scope: 'Setup '}),
ip_ranges: new Signale({scope: 'IP Ranges'})
};

15
backend/migrate.js Normal file
View File

@ -0,0 +1,15 @@
const db = require('./db');
const logger = require('./logger').migrate;
module.exports = {
latest: function () {
return db.migrate.currentVersion()
.then((version) => {
logger.info('Current database version:', version);
return db.migrate.latest({
tableName: 'migrations',
directory: 'migrations'
});
});
}
};
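A typical startup sequence using this module; the listen step is an assumption.

const migrate = require('./migrate');
const logger  = require('./logger').global;

// Bring the schema up to date before the app starts listening (listen step assumed)
migrate.latest()
    .then(() => {
        logger.info('Migrations complete, starting app');
        // app.listen(81) would follow here
    })
    .catch((err) => {
        logger.error(err.message);
        process.exit(1);
    });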

View File

@ -0,0 +1,205 @@
const migrate_name = 'initial-schema';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
return knex.schema.createTable('auth', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('user_id').notNull().unsigned();
table.string('type', 30).notNull();
table.string('secret').notNull();
table.json('meta').notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
})
.then(() => {
logger.info('[' + migrate_name + '] auth Table created');
return knex.schema.createTable('user', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.integer('is_disabled').notNull().unsigned().defaultTo(0);
table.string('email').notNull();
table.string('name').notNull();
table.string('nickname').notNull();
table.string('avatar').notNull();
table.json('roles').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] user Table created');
return knex.schema.createTable('user_permission', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('user_id').notNull().unsigned();
table.string('visibility').notNull();
table.string('proxy_hosts').notNull();
table.string('redirection_hosts').notNull();
table.string('dead_hosts').notNull();
table.string('streams').notNull();
table.string('access_lists').notNull();
table.string('certificates').notNull();
table.unique('user_id');
});
})
.then(() => {
logger.info('[' + migrate_name + '] user_permission Table created');
return knex.schema.createTable('proxy_host', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull();
table.string('forward_ip').notNull();
table.integer('forward_port').notNull().unsigned();
table.integer('access_list_id').notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
table.integer('caching_enabled').notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo('');
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table created');
return knex.schema.createTable('redirection_host', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull();
table.string('forward_domain_name').notNull();
table.integer('preserve_path').notNull().unsigned().defaultTo(0);
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
table.integer('block_exploits').notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo('');
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] redirection_host Table created');
return knex.schema.createTable('dead_host', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.json('domain_names').notNull();
table.integer('certificate_id').notNull().unsigned().defaultTo(0);
table.integer('ssl_forced').notNull().unsigned().defaultTo(0);
table.text('advanced_config').notNull().defaultTo('');
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] dead_host Table created');
return knex.schema.createTable('stream', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.integer('incoming_port').notNull().unsigned();
table.string('forward_ip').notNull();
table.integer('forwarding_port').notNull().unsigned();
table.integer('tcp_forwarding').notNull().unsigned().defaultTo(0);
table.integer('udp_forwarding').notNull().unsigned().defaultTo(0);
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] stream Table created');
return knex.schema.createTable('access_list', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.string('name').notNull();
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] access_list Table created');
return knex.schema.createTable('certificate', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('owner_user_id').notNull().unsigned();
table.integer('is_deleted').notNull().unsigned().defaultTo(0);
table.string('provider').notNull();
table.string('nice_name').notNull().defaultTo('');
table.json('domain_names').notNull();
table.dateTime('expires_on').notNull();
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] certificate Table created');
return knex.schema.createTable('access_list_auth', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('access_list_id').notNull().unsigned();
table.string('username').notNull();
table.string('password').notNull();
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] access_list_auth Table created');
return knex.schema.createTable('audit_log', (table) => {
table.increments().primary();
table.dateTime('created_on').notNull();
table.dateTime('modified_on').notNull();
table.integer('user_id').notNull().unsigned();
table.string('object_type').notNull().defaultTo('');
table.integer('object_id').notNull().unsigned().defaultTo(0);
table.string('action').notNull();
table.json('meta').notNull();
});
})
.then(() => {
logger.info('[' + migrate_name + '] audit_log Table created');
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down the initial data.');
return Promise.resolve(true);
};
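A minimal sketch of how migration files like the one above get executed, assuming the project's knexfile.js points knex at this migrations directory (the wiring shown here is an assumption, not code from this commit). knex runs each pending exports.up in order and records the batch:

const knex = require('knex')(require('./knexfile'));

knex.migrate.latest()
	.then(([batch, applied]) => {
		// batch is the migration batch number; applied lists the files that ran
		console.log('Applied batch ' + batch + ':', applied);
	})
	.catch((err) => console.error(err))
	.finally(() => knex.destroy());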

View File

@ -0,0 +1,35 @@
const migrate_name = 'websockets';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
return knex.schema.table('proxy_host', function (proxy_host) {
proxy_host.integer('allow_websocket_upgrade').notNull().unsigned().defaultTo(0);
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
return Promise.resolve(true);
};
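The new column follows the convention used throughout these migrations of storing boolean flags as unsigned integers defaulting to 0. A hedged example of reading the flag back with the knex query builder (table and column names come from the migration above; the query itself is illustrative only):

knex('proxy_host')
	.where({allow_websocket_upgrade: 1, is_deleted: 0})
	.select('id', 'domain_names')
	.then((rows) => {
		// rows lists the proxy hosts whose generated nginx config should include upgrade headers
	});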

View File

@ -0,0 +1,49 @@
const migrate_name = 'http2_support';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
return knex.schema.table('proxy_host', function (proxy_host) {
proxy_host.integer('http2_support').notNull().unsigned().defaultTo(0);
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
return knex.schema.table('redirection_host', function (redirection_host) {
redirection_host.integer('http2_support').notNull().unsigned().defaultTo(0);
});
})
.then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered');
return knex.schema.table('dead_host', function (dead_host) {
dead_host.integer('http2_support').notNull().unsigned().defaultTo(0);
});
})
.then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered');
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
return Promise.resolve(true);
};
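This migration applies an identical column definition to three tables. A possible refactor, shown only as an illustration and not part of this commit, chains the alterations with a small helper:

function addFlag(knex, tables, column, defaultValue) {
	// Alter each table in sequence, adding the same unsigned integer flag
	return tables.reduce(
		(chain, name) => chain.then(() => knex.schema.table(name, (t) => {
			t.integer(column).notNull().unsigned().defaultTo(defaultValue);
		})),
		Promise.resolve()
	);
}

// Usage equivalent to the migration above:
// addFlag(knex, ['proxy_host', 'redirection_host', 'dead_host'], 'http2_support', 0);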

View File

@ -0,0 +1,55 @@
const migrate_name = 'disabled';
const logger = require('../logger').migrate;
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.up = function (knex/*, Promise*/) {
logger.info('[' + migrate_name + '] Migrating Up...');
return knex.schema.table('proxy_host', function (proxy_host) {
proxy_host.integer('enabled').notNull().unsigned().defaultTo(1);
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
return knex.schema.table('redirection_host', function (redirection_host) {
redirection_host.integer('enabled').notNull().unsigned().defaultTo(1);
});
})
.then(() => {
logger.info('[' + migrate_name + '] redirection_host Table altered');
return knex.schema.table('dead_host', function (dead_host) {
dead_host.integer('enabled').notNull().unsigned().defaultTo(1);
});
})
.then(() => {
logger.info('[' + migrate_name + '] dead_host Table altered');
return knex.schema.table('stream', function (stream) {
stream.integer('enabled').notNull().unsigned().defaultTo(1);
});
})
.then(() => {
logger.info('[' + migrate_name + '] stream Table altered');
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @param {Promise} Promise
* @returns {Promise}
*/
exports.down = function (knex, Promise) {
logger.warn('[' + migrate_name + '] You can\'t migrate down this one.');
return Promise.resolve(true);
};
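The down() above is deliberately a no-op. For comparison, a reversible version (an assumption for illustration, not the author's code) would drop the columns that up() added:

exports.down = function (knex) {
	return knex.schema.table('proxy_host', (t) => t.dropColumn('enabled'))
		.then(() => knex.schema.table('redirection_host', (t) => t.dropColumn('enabled')))
		.then(() => knex.schema.table('dead_host', (t) => t.dropColumn('enabled')))
		.then(() => knex.schema.table('stream', (t) => t.dropColumn('enabled')));
};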

View File

@ -19,7 +19,7 @@ exports.up = function (knex/*, Promise*/) {
})
.then(() => {
logger.info('[' + migrate_name + '] proxy_host Table altered');
})
});
};
/**

Some files were not shown because too many files have changed in this diff.