diff --git a/.Dockerfile.swp b/.Dockerfile.swp
deleted file mode 100644
index 1eb762a..0000000
Binary files a/.Dockerfile.swp and /dev/null differ
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..4eba6cb
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,9 @@
+[submodule "api"]
+	path = api
+	url = https://github.com/deepmarket/api.git
+[submodule "core"]
+	path = core
+	url = https://github.com/deepmarket/core.git
+[submodule "docker-spark"]
+	path = docker-spark
+	url = https://github.com/deepmarket/docker-spark.git
diff --git a/README.md b/README.md
index 181f619..e2b42d6 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,31 @@
 # DeepShare
 An OpenSource resource sharing platform accompanied by the capability to execute Distributed Tensorflow Programs at a very low cost.
+
+To clone: `git clone --recurse-submodules https://github.com/shared-systems/DeepShare.git`
+
+To build: `docker-compose up --build --detach`
+
+To stop: `docker-compose down`
+
+To update the submodules' pinned commits: `git submodule update --recursive --remote`
+
+To pull new changes from each repo: `git submodule foreach git pull origin develop`
+Note: This requires a `develop` branch to exist in each submodule.
+
+To clean: `docker container prune -f && docker image prune -f && docker network prune -f && docker volume prune -f`
+
+### Potential Problems:
+Some of our servers have run into a problem with AppArmor preventing us from bringing down some of the containers we're using. While we look into this issue, there's a [workaround](https://forums.docker.com/t/can-not-stop-docker-container-permission-denied-error/41142/6) that looks like this:
+```sh
+# Check the status of AppArmor (there should be entries showing docker processes in `enforce-mode`)
+sudo aa-status
+
+# Disable the service
+sudo systemctl disable apparmor.service --now
+
+# Make sure it doesn't come back up when the machine restarts (once a permanent solution is found we'll re-roll the system anyway)
+sudo service apparmor teardown
+
+# The status should now say that no programs are in `enforce-mode`
+sudo aa-status
+```
diff --git a/api b/api
new file mode 160000
index 0000000..8bffa3f
--- /dev/null
+++ b/api
@@ -0,0 +1 @@
+Subproject commit 8bffa3f6d063828ad2b87e2fc06ac1b54f9ea835
diff --git a/core b/core
new file mode 160000
index 0000000..6122fef
--- /dev/null
+++ b/core
@@ -0,0 +1 @@
+Subproject commit 6122fef62859666ee74e326d1b0aa508ffe182c2
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..51b1842
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,90 @@
+version: "3"
+services:
+
+#  nginx:
+#    image: nginx:1.15-alpine
+#    ports:
+#      - "80:80"
+#      - "443:443"
+#    volumes:
+#      - ./conf/nginx:/etc/nginx/conf.d
+
+  # General server acting as reverse proxy to the api
+  nginx:
+    image: owasp/modsecurity
+    ports:
+      - "80:80"
+      - "443:443"
+    volumes:
+      - ./nginx:/etc/nginx/conf.d
+
+  # Client for generating CA certs
+  #certbot:
+  #  image: certbot/certbot
+
+  # Api infrastructure as backend for clients (i.e.
pluto) + api: + container_name: deepmarket_api + + build: ./api + image: "api:latest" + + restart: always + + environment: + - MONGO_DATABASE_URL=mongodb://deepmarket_db:27017 + + ports: + - "8080:8080" + + networks: + - services_bridge + - db_bridge + + depends_on: + - mongo + + command: npm start + + # Price generation microservice + generate_prices: + container_name: deepmarket_gen_prices + + build: ./core/generate_prices + image: "generate_prices:latest" + + # restart: always + environment: + - API_PRICING_ENDPOINT=http://deepmarket_api:8080/api/v1/pricing + + networks: + - services_bridge + + depends_on: + - api + + # Mongo db instance as backend for api + mongo: + container_name: deepmarket_db + image: mongo + + networks: + - db_bridge + + volumes: + - /data/db:/data/db + + ports: + - "27017:27017" + +networks: + + services_bridge: + driver: bridge + + web_bridge: + driver: bridge + + db_bridge: + driver: bridge + diff --git a/docker-spark b/docker-spark new file mode 160000 index 0000000..d385093 --- /dev/null +++ b/docker-spark @@ -0,0 +1 @@ +Subproject commit d3850933e7a8efd80ef0d2aa623a9cbfb28d4560 diff --git a/sbin/executor-setup.sh b/sbin/executor-setup.sh deleted file mode 100644 index 2b5f700..0000000 --- a/sbin/executor-setup.sh +++ /dev/null @@ -1,57 +0,0 @@ -#/!/bin/bash -# Script to start -# 1.Apache Spark-Master -# 2.HDFS -# 3.MongoDatabase -# 4.DeepShare Services -# 5.Backend Services - -echo "Welcome to DeepShare services" - -#Start Apache Spark Master- Check if the SPARK 8989 -if ! lsof -i:8989 -then - ~/Documents/spark-2.3.0-bin-hadoop2.7/sbin/start-master.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - -#Start HDFS-Check if the hadoop UI is up and running -if ! lsof -i:50070 -then - ~/Documents/hadoop-2.7.5/sbin/start-dfs.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - - -#Start Mongo Database //Need to test if there was an error in starting up, will $? be returning 1 or 0 -if ! lsof -i:27017 -then - mongod - echo "return status is $?" -else - echo "return status is $?" -fi - - -#Start DeepShare Services- Check if the 8080 port is used by any process -if ! lsof -i:8080 -then - node ~/Documents/api/app.js - echo "return status is $?" -else - echo "return status is $?" -fi - - -#Start Backend Services -#1.Scheduler.py -#2. Update_Job_Status.py - diff --git a/sbin/services-setup.sh b/sbin/services-setup.sh deleted file mode 100644 index 2b5f700..0000000 --- a/sbin/services-setup.sh +++ /dev/null @@ -1,57 +0,0 @@ -#/!/bin/bash -# Script to start -# 1.Apache Spark-Master -# 2.HDFS -# 3.MongoDatabase -# 4.DeepShare Services -# 5.Backend Services - -echo "Welcome to DeepShare services" - -#Start Apache Spark Master- Check if the SPARK 8989 -if ! lsof -i:8989 -then - ~/Documents/spark-2.3.0-bin-hadoop2.7/sbin/start-master.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - -#Start HDFS-Check if the hadoop UI is up and running -if ! lsof -i:50070 -then - ~/Documents/hadoop-2.7.5/sbin/start-dfs.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - - -#Start Mongo Database //Need to test if there was an error in starting up, will $? be returning 1 or 0 -if ! lsof -i:27017 -then - mongod - echo "return status is $?" -else - echo "return status is $?" -fi - - -#Start DeepShare Services- Check if the 8080 port is used by any process -if ! lsof -i:8080 -then - node ~/Documents/api/app.js - echo "return status is $?" -else - echo "return status is $?" 
-fi - - -#Start Backend Services -#1.Scheduler.py -#2. Update_Job_Status.py - diff --git a/sbin/start-services.sh b/sbin/start-services.sh deleted file mode 100644 index 2b5f700..0000000 --- a/sbin/start-services.sh +++ /dev/null @@ -1,57 +0,0 @@ -#/!/bin/bash -# Script to start -# 1.Apache Spark-Master -# 2.HDFS -# 3.MongoDatabase -# 4.DeepShare Services -# 5.Backend Services - -echo "Welcome to DeepShare services" - -#Start Apache Spark Master- Check if the SPARK 8989 -if ! lsof -i:8989 -then - ~/Documents/spark-2.3.0-bin-hadoop2.7/sbin/start-master.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - -#Start HDFS-Check if the hadoop UI is up and running -if ! lsof -i:50070 -then - ~/Documents/hadoop-2.7.5/sbin/start-dfs.sh - echo "return status is $?" -else - echo "return status is $?" -fi - - - - -#Start Mongo Database //Need to test if there was an error in starting up, will $? be returning 1 or 0 -if ! lsof -i:27017 -then - mongod - echo "return status is $?" -else - echo "return status is $?" -fi - - -#Start DeepShare Services- Check if the 8080 port is used by any process -if ! lsof -i:8080 -then - node ~/Documents/api/app.js - echo "return status is $?" -else - echo "return status is $?" -fi - - -#Start Backend Services -#1.Scheduler.py -#2. Update_Job_Status.py - diff --git a/services/Executor_Services/Job_Status_Updater.py b/services/Executor_Services/Job_Status_Updater.py deleted file mode 100644 index e1fbc98..0000000 --- a/services/Executor_Services/Job_Status_Updater.py +++ /dev/null @@ -1,115 +0,0 @@ -#This service runs every 30 sec to update the status of the jobs which are submitted by the job_scheduler. -#Status of the job can be "Finished","Failed","Running" - -#Check the status of the jobs which are already submitted by the job_scheduler in FIFO manner. -# - - -import sys -import shlex -import os -import schedule -import time -import pprint -from bson import ObjectId -from pyspark import SparkContext -from pyspark import SparkConf -from pymongo import MongoClient -from urllib import request as requests -import json -import subprocess - - - -# check job status -def checkJobStatus(jobResults): - # check job status (Add corresponding code to fetch the job status) - jobStatus_url = requests.urlopen('http://131.252.209.102:8443/json/') - data = jobStatus_url.read() - encoding = jobStatus_url.info().get_content_charset('utf-8') - JSON_object = json.loads(data.decode(encoding)) - - # After submitting the Job through submitJob function, if the job status is checked before the job execution starts. - # checkJobStatus should be able to check the status of the job - # If there are no applications executed - - if len(JSON_object['completedapps']) != 0: - for job in jobResults: - #we can use hashmap to improve the performance. 
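#The hashmap idea above, sketched as a comment (illustrative names only — this
#never existed in the file): index the completed apps by id once, so each job
#is resolved in O(1) instead of rescanning the whole list per job:
#    completed_by_id = {str(app['_id']): app for app in JSON_object['completedapps']}
#    match = completed_by_id.get(str(job['_id']))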
- for app in JSON_object['completedapps']: - if job['_id'] == str(app['_id']): - total_cores = app['cores'] - total_cost_memory = app['memoryperslave'] * prices[job['time_slot']].memory - print(str(total_cost_memory) + " total memory cost") - total_cost_cores = total_cores * prices[job['time_slot']].cpus - totalcost=total_cost_cores+total_cost_memory - print(str(total_cost_cores) + "total cores cost") - print("Job:" + job['name'] + "completed in duration " + str(job['duration'] / 3600000.0) + "hrs") - updateResult = jobs.update_one({"_id": ObjectId(str(job['_id']))}, {"$set": {"price": totalcost, "status": "COMPLETED", "updateOn": str(date.today())}}) - - - -def update_job_status(): - global todayDate,update_prices_required - #This block of code is executed when the script is run for the first time. - if (date.today() + timedelta(1) - todayDate).days == 1 and update_prices_required: - print("prices are downloaded into the array") - update_prices() - - #This block of code is executed when the date has changed and update the prices with the recent ones. - if (date.today() + timedelta(1) - todayDate).days != 1: - #update both todayDate and prices - print("updating both the prices and today date") - update_prices() - todayDate=date.today() - checkDB() - - - -def update_prices(): - global update_prices_required,prices - prices_url = requests.urlopen('http://131.252.209.102:8080/api/v1/pricing') - prices_data = prices_url.read() - encoding = prices_url.info().get_content_charset('utf-8') - prices_data_object = json.loads(prices_data.decode(encoding)) - if len(prices_data_object['prices']) != 0: - for price in prices_data_object['prices']: - prices.append(price) - update_prices_required = False - - - -#Check DB for scheduled jobs -def checkDB(): - print("Checking Database...") - jobResults = jobs.find({"$or":[ {"status":"Submitted"}, {"status":"RUNNING"}]}) - print(jobResults) - if jobResults.count() > 0: - print("found Jobs") - checkJobStatus(jobResults) -#exit - - -#Update module to update the records when jobs are submitted/finished -def updateJobStatus(doc,curr_status): - objectId = doc['_id'] - updateResult = jobs.update_one({"_id":ObjectId(str(objectId))}, {"$set":{"status":curr_status}}) - return updateResult -#exit - - -todayDate = date.today() -update_prices_required = True - -prices = [] -client = MongoClient('localhost', 27017) -db = client.ShareResources -jobs = db.jobs -#Schedule to check the job status -schedule.every(5).seconds.do(lambda: update_job_status()) - - -while True: - schedule.run_pending() - time.sleep(1) - diff --git a/services/Executor_Services/scheduler.py b/services/Executor_Services/scheduler.py deleted file mode 100644 index a4b8ad1..0000000 --- a/services/Executor_Services/scheduler.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import schedule -import time -from bson import ObjectId -from pymongo import MongoClient -import threading -from subprocess import Popen - -# Set environment variables(if not set by user) -if not os.environ.get('SPARK_HOME'): - os.environ['SPARK_HOME'] = "/home/spark/Documents/spark-2.3.0-bin-hadoop2.7" -if not os.environ.get('TFoS_HOME'): - os.environ['TFoS_HOME'] = "/home/spark/Documents/TensorFlowOnSpark" - - -master = "spark://131.252.209.102:8989" - -# Global variables to ensure one job is running -JOB_STATUS = 0 -JOB_ID = 0 - -# SPARK_SUBMIT -# spark_submit_train = r'''$SPARK_HOME/bin/spark-submit --master {MASTER} --conf spark.app.name={NAME} --conf 
spark.executorEnv.LD_LIBRARY_PATH=$JAVA_HOME/lib/amd64/server:$HADOOP_HOME/lib/native --conf spark.executorEnv.CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH --conf spark.executorEnv.HADOOP_HDFS_HOME=$HADOOP_HOME --py-files hdfs://{SOURCE_FILES} --conf spark.cores.max={TOTAL_CORES} --conf spark.task.cpus={CORES_PER_WORKER} --conf spark.executor.memory={MEMORY}g --conf spark.executorEnv.JAVA_HOME=$JAVA_HOME $TFoS_HOME/examples/mnist/spark/mnist_spark.py --cluster_size={SPARK_WORKER_INSTANCES} --images {IMAGES}/images --labels {LABELS}/labels --format csv --mode train --model mnist_model''' -#spark_submit_train = r'''/home/spark/Documents/spark-2.3.0-bin-hadoop2.7/bin/spark-submit --master {MASTER} --conf spark.app.name={NAME} --conf spark.executorEnv.LD_LIBRARY_PATH=$JAVA_HOME/lib/amd64/server:$HADOOP_HOME/lib/native --conf spark.executorEnv.CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH --conf spark.executorEnv.HADOOP_HDFS_HOME=$HADOOP_HOME --py-files hdfs://{SOURCE_FILES} --conf spark.cores.max={TOTAL_CORES} --conf spark.task.cpus={CORES_PER_WORKER} --conf spark.executor.memory={MEMORY}g --conf spark.executorEnv.JAVA_HOME=$JAVA_HOME /home/spark/Documents/TensorFlowOnSpark/examples/mnist/spark/mnist_spark.py --cluster_size={SPARK_WORKER_INSTANCES} --images {IMAGES}/images --labels {LABELS}/labels --format csv --mode train --model mnist_model ''' - -spark_submit_train = r'''echo $SPARK_HOME ''' - -# Database connection -client = MongoClient('localhost', 27017) -db = client.ShareResources -jobs = db.jobs - - -# Check DB for scheduled jobs -def checkDB(): - print("Checking Database...") - jobResults = jobs.find({"status": "Scheduled"}).sort("created_on") - if jobResults.count() > 0: - id = jobResults[0]['_id'] - doc = jobResults[0] - updateResult = jobs.update_one({"_id": ObjectId(str(id))}, {"$set": {"status": "Submitted"}}) - job__submission__thread = threading.Thread(name='daemon' + str(id), target=submit_job(doc)) - job__submission__thread.daemon = True - job__submission__thread.start() - - -# exit - -# Submit job to SPARK -def submit_job(job): - global JOB_STATUS - global JOB_ID - - if JOB_STATUS == 0: - # Submit the first job scheduled - - # Set all the parameters for spark-submit command - # data_files = [] - input_files = job['input_files'][0] - source_files = job['source_files'][0] - memory = job['memory'] - cores = job['cores'] - workers = job['workers'] - total_cores = int(cores) * int(workers) - name = job['_id'] - print(input_files+":workers") - - cmd_call = spark_submit_train.format(MASTER=master, - NAME=name, - SOURCE_FILES=source_files, - TOTAL_CORES=total_cores, - SPARK_WORKER_INSTANCES=workers, - MEMORY=memory, - CORES_PER_WORKER=cores, - IMAGES=input_files, - LABELS=input_files - ) - - updateResult = jobs.update_one({"_id": ObjectId(str(name))}, {"$set": {"status": "Running"}}) - JOB_ID = str(job['_id']) - JOB_STATUS = 1 - # result = os.system(cmd_call) - - try: - cmd_arr = [cmd_call] - Popen(cmd_arr) - - JOB_ID = 0 - JOB_STATUS = 0 - except Exception as e: - print(e) - pass - - -# Schedule to check the database for any scheduled jobs -schedule.every(15).seconds.do(checkDB) - -while True: - schedule.run_pending() - time.sleep(5) - diff --git a/services/api/config/config.js b/services/api/config/config.js deleted file mode 100644 index 8d3d2e7..0000000 --- a/services/api/config/config.js +++ /dev/null @@ -1,38 +0,0 @@ -/** - * @fileoverview This file contains configuration details for the application. 
- * Namely, those are: - * - Parameters for the DB name and location - * - Json Web Token signature keys and salt parameters - * - Standard pathing for the routes, models, and controller modules - * - Possible job status' (Although this unlikely won't be used here) - * - * @exports {config} The applications configuration object. - */ - -"use strict"; - -const config = {}; - -// Config parameters of the database. -config.API_ENDPOINT = "/api/v1"; -config.DB_URI = "mongodb://localhost/ShareResources"; -config.DB_TEST_URI = "mongodb://localhost/test"; -config.DATABASE = "SHARE_RESOURCES"; -config.JWT_KEY = "$h!r#res0urces"; // TODO: probably use env var instead? -config.SALT_ROUNDS = 10; - -// Config for the PATHS. Make sure to check the below paths that are commonly used. -config.ROUTES_PATH = "./api/routes"; -config.CONTROLLERS_PATH = "../controllers"; -config.MODELS_PATH = "./api/models"; -config.APPLICATION_CONFIG = "./api/config"; - -config.JOB_STATUS = { - SCHEDULED: "Scheduled", - // PENDING: "Pending", // Voted to remove this in lieu of scheduled - ACTIVE: "Active", - FINISHED: "Finished", - FAILED: "Failed", -}; - -module.exports = config; \ No newline at end of file diff --git a/services/api/controllers/auth_controller.js b/services/api/controllers/auth_controller.js deleted file mode 100644 index e751c7b..0000000 --- a/services/api/controllers/auth_controller.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; - -const config = require('../config/config'); - -let jwt = require('jsonwebtoken'); -let bcrypt = require('bcrypt'); -let customer = require('../models/customer_model'); - -// Authenticate user; return token -exports.login = (req, res) => { - let message; - let status = 200; - let token = ""; - let email = req.body.email; - let plaintext_password = req.body.password; - - // TODO: man this is janky - customer.findOne({"email": email}, "email password", (err, user) => { - if (err) { - message = "Failed to log in.\nPlease verify your email/password combination."; - status = 400; - } else if(!user) { - message = "The provided email/password combination could not be found"; - status = 401; - res.status(status).json({ - success: !!user, - error: err ? err : null, - message: message, - token: null, - auth: false, - }); - } else { - message = "Login successful"; - bcrypt.compare(plaintext_password, user.password).then(auth => { - if(auth) { - token = jwt.sign({id: user._id}, config.JWT_KEY); - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - token: token, - auth: true, - }); - }).catch(err => { - console.log(`Error in auth controller: ${err.name}`); - message = "Failed to log in.\nPlease verify your email and password"; - status = 401; - res.status(status).json({ - success: !err, - error: err ? 
err : null, - message: message, - token: null, - auth: false, - }); - }); - } - }); -}; - -exports.logout = (req, res) => { - let message = "Successfully logged out."; - let status = 200; - - res.status(status).json({ - success: true, - error: null, - message: message, - token: null, - auth: false, - }) -}; \ No newline at end of file diff --git a/services/api/controllers/customer_controller.js b/services/api/controllers/customer_controller.js deleted file mode 100644 index 2b15981..0000000 --- a/services/api/controllers/customer_controller.js +++ /dev/null @@ -1,123 +0,0 @@ -"use strict"; - -const config = require('../config/config'); -let jwt = require('jsonwebtoken'); -let bcrypt = require('bcrypt'); -let customer = require('../models/customer_model'); - -exports.get_customer_by_id = (req, res) => { - let message; - let status = 200; - - customer.findById(req.user_id, (err, customer) => { - if (err) { - message = `Failed to get customer information.\nError: ${err.name}: ${err.message}`; - status = 500; - } else if (!customer) { - status = 400; - message = `Failed to get customer information.\nThe user with id '${req.user_id}' could not be found.`; - } else { - message = "Successfully fetched customer information."; - } - - res.status(status).json({ - success: !!customer, // `!!` is shorthanded boolean conversion - error: err ? err : null, - message: message, - customer: customer, - }) - }); -}; - -/* Add a new customer to the collection */ -exports.addcustomer = (req, res) => { - let user; - let message = ""; - let status = 200; - - bcrypt.hash(req.body.password, config.SALT_ROUNDS, (err, hash) => { - user = new customer({ - firstname: req.body.firstname, - lastname: req.body.lastname, - email: req.body.email, - password: hash, - status: "Active", - }); - - user.save((err, new_user) => { - if (err) { - if (err.code === 11000) { - message = `Failed to create account.\nThe email '${req.body.email}' is already in use.`; - status = 400; - } else { - message = `Failed to create account.\nError: ${err.name}: ${err.message}.`; - status = 500; - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - token: null, - // auth: true, // TODO: Not sure about this yet - }); - } else { - message = "Successfully created account."; - - let jwt_payload = { - // email: req.body.email, - id: new_user._id, - }; - jwt.sign(jwt_payload, config.JWT_KEY, {expiresIn: '24h'}, (err, token) => { - if (err) { - status = 400; - message = "Failed to create authentication token." - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - token: token, - user: new_user, - // auth: true, // TODO: Not sure about this yet - }); - }); - } - }); - }); -}; - -exports.updateprofilebyid = (req, res) => { - let status = 501; - let message = "NOT IMPLEMENTED"; - - res.status(status).json({ - success: false, - error: null, - message: message, - }); -}; - -// TODO: rename this -exports.deletecustomerbyid = (req, res) => { - let message = ""; - let status = 200; - - customer.findOneAndDelete({_id: req.user_id}, (err, customer) => { - if(err) { - status = 500; - message = `Failed to remove user.\nError: ${err.name}.`; - } else if(!customer) { - // TODO: if customer is null this 'fails' silently. As in, it doesn't set err. Let client know? - status = 400; - message = `Failed to remove user.\nCould not find customer id.`; - } else { - message = "Successfully removed user."; - } - - res.status(status).json({ - success: !!customer, - error: err ? 
err : null, - message: message, - }); - }); -}; \ No newline at end of file diff --git a/services/api/controllers/jobs_controller.js b/services/api/controllers/jobs_controller.js deleted file mode 100644 index f1a849a..0000000 --- a/services/api/controllers/jobs_controller.js +++ /dev/null @@ -1,140 +0,0 @@ -/** - * - * @fileoverview This file implements the logic for the job endpoints. - * It is dependent on the job route module. - * - */ - -"use strict"; - -const config = require('../config/config'); -const customer = require("../models/customer_model"); -const jobs = require('../models/job_model'); - -exports.get_all_jobs = (req, res) => { - let status = 200; - let message = ""; - - jobs.find({customer_id: req.user_id}, (err, jobs) => { - if (err) { - message = `There was an error while trying to retrieve the jobs for the customer: - ${req.user_id}\nError: ${err.name}`; - status = 401; - } else { - message = "Successfully retrieved jobs."; - } - res.status(status).json({ - success: !!jobs, // `!!` is shorthand boolean conversion - error: err ? err : null, - message: message, - jobs: jobs - }) - }); -}; - -/* GET ALL jobs BY CUSTOMER_ID */ -exports.get_job_by_job_id = (req, res) => { - let status = 200; - let message = ""; - - jobs.find({ - customer_id: req.user_id, - id: req.params.job_id, - }, (err, job) => { - if (err) { - message = `There was an error while trying to retrieve that specific job.\nError: ${err.name}`; - status = 401; - } else { - message = "Successfully retrieved job."; - } - job.updated_on = Date.now(); - res.status(status).json({ - success: !!job, - error: err ? err : null, - message: message, - job: job, - }) - }); -}; - -/* ADD NEW Jobs UNDER THE USER ACCOUNT */ -exports.add_job = (req, res) => { - let status = 200; - let message = ""; - - let job = new jobs({ - workers: req.body.workers, - cores: req.body.cores, - memory: req.body.memory, - timeslot_id: req.body.timeslot_id, - status: config.JOB_STATUS.SCHEDULED, - price: req.body.price, - customer_id: req.user_id, - created_on: Date.now(), - updated_on: Date.now(), - created_by: req.user_id, - updated_by: req.user_id, - }); - - // HDFS path where the files are uploaded, before submitting the job - job.source_files.push(req.body.source_files); - job.input_files.push(req.body.input_files); - - // Decrement users credits accordingly. - let customer_promise = customer.findOneAndUpdate({_id: req.user_id}, {$inc: {credits: -req.body.price}}).exec(); - customer_promise.catch(err => { - message = `There was an error charging your account.\nError: ${err.name}: ${err.message}\n`; - status = 500; - }); - - job.save((err, job) => { - if (err) { - message += `There was an error while trying to add the job to the queue.\nError: ${err.name}`; - status = 500; - } else { - message = "Successfully added job to the queue."; - } - res.status(status).json({ - success: !!job, - error: err ? 
err : null, - message: message, - job: job - }); - }); - -}; - -/* UPDATE job DETAILS */ -exports.update_job_status_by_job_id = (req, res) => { - let status = 501; - let message = "Not Implemented"; - - res.status(status).json({ - success: true, - error: null, - message: message, - }); -}; - -/* DELETE job by using the job and customer id's provided by the client */ -exports.delete_job_by_job_id = function(req, res) { - let status = 200; - let message = ""; - - jobs.remove({ - _id: req.params.job_id, - customer_id: req.user_id, - }, (err) => { - if (err) { - message = `There was an error while trying to delete that specific job.\nError: ${err.name}`; - status = 401; - } else { - message = "Job successfully deleted."; - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - }); - }); -}; \ No newline at end of file diff --git a/services/api/controllers/pricing_controller.js b/services/api/controllers/pricing_controller.js deleted file mode 100644 index b5b33d4..0000000 --- a/services/api/controllers/pricing_controller.js +++ /dev/null @@ -1,101 +0,0 @@ -/** - * - * @fileoverview This file implements the logic for the pricing endpoints. - * It is dependent on the pricing route module. - * - */ - -"use strict"; - -const Prices = require('../models/pricing_model'); - -exports.get_prices = (req, res) => { - let message = ""; - let status = 200; - - let now = new Date(); - let midnight = new Date(now.getFullYear(), now.getMonth(), now.getDate(), 0, 0, 0); - midnight.setUTCHours(0, 0, 0, 0); - - // Instantiate date as midnight 'yesterday' so we don't get Unix epoch - let midnight_tomorrow = new Date(midnight.getTime()); - - // Update time with 1 day delta - midnight_tomorrow.setDate(midnight_tomorrow.getDate() + 1); - - console.log(midnight, midnight_tomorrow); - - Prices.find({ - // Find prices generated withing the current day's time frame - createdOn: { - $gte: midnight, - $lte: midnight_tomorrow - }, - // There should never be time slots outside this range but why not - time_slot: { - $gte: 0, - $lte: 3 - } - }, (err, prices) => { - if(err) { - message = `There was an error while retrieving prices.\nError: ${err.name}`; - status = 500; - } else if (!prices) { - message = "Prices have not been generated yet."; - status = 301; - } else { - message = "Prices retrieved successfully."; - } - res.status(status).json({ - success: !!prices && prices.length, - error: err ? err : null, - message: message, - prices: prices, - }); - }); - -}; - -exports.add_price = (req, res) => { - let message, price; - let status = 200; - let id = req.user_id; - - // For some reason this has to be initialized earlier to work. - price = new Prices({ - time_slot: req.body.time_slot, - cpus: req.body.cpus, - gpus: req.body.gpus, - memory: req.body.memory, - disk_space: req.body.price, - created_by: id, - updated_by: id, - }); - - price.save((err, new_price_field) => { - if(err) { - if (err.code === 11000) { - message = `There was an error while adding the price field.\nError: ${err.name}`; - } else { - message = "There was an error while adding the price field."; - } - status = 500; - } else { - message = "Price field added successfully."; - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - data: new_price_field ? 
new_price_field : null, - }); - }); -}; - -exports.update_price = (req, res) => { - res.status(501).json({ - success: true, - error: null, - message: "NOT IMPLEMENTED", - }); -}; diff --git a/services/api/controllers/resource_controller.js b/services/api/controllers/resource_controller.js deleted file mode 100644 index 5704507..0000000 --- a/services/api/controllers/resource_controller.js +++ /dev/null @@ -1,131 +0,0 @@ -/** - * - * @fileoverview This file implements the logic for the resource endpoints. - * It is dependent on the resource route module. - * - */ - - -"use strict"; - -const Resources = require('../models/resource_model'); - -// Deprecated as of 5/22 -// exports.getallresources = function(req, res) { -// let message = ""; -// let status = 200; -// -// Resources.find({}, (err, resources) => { -// if(err) { -// message = `There was an error while retrieving all of the resources.\nError: ${err.name}`; -// status = 500; -// } else { -// message = "Resources retrieved successfully."; -// } -// res.status(status).json({ -// success: !!resources, -// error: err ? err : null, -// message: message, -// resources: resources, -// }) -// }); -// }; - -exports.get_resources_by_customer_id = (req, res) => { - let message = ""; - let status = 200; - let id = req.user_id; - - Resources.find({owner: id}, (err, resources) => { - if(err) { - message = `There was an error while retrieving all of your resources.\nError: ${err.name}`; - status = 500; - } else { - message = "Resources retrieved successfully."; - } - res.status(status).json({ - success: !!resources, - error: err ? err : null, - message: message, - resources: resources, - }); - }); - -}; - -exports.add_resource_by_customer_id = (req, res) => { - let message, resource; - let status = 200; - let id = req.user_id; - - // For some reason this has to be initialized earlier to work. - resource = new Resources({ - ip_address: req.body.ip_address, - ram: req.body.ram, - cores: req.body.cores, - cpus: req.body.cpus, - gpus: req.body.gpus, - status: "Online", - price: req.body.price, // TODO: this may need to be determined server side - owner: id, - createdBy: id, - updatedBy: id, - machine_name: req.body.machine_name - }); - - resource.save((err, new_resource) => { - if(err) { - if (err.code === 11000) { - message = `There was an error while adding the resource.\nError: ${err.name}`; - } else { - message = "There was an error while adding the resource."; - } - status = 500; - } else { - message = "Resource added successfully."; - } - res.status(status).json({ - success: !err, - error: err ? err : null, - message: message, - resource: new_resource ? new_resource : null, - }); - }); -}; - -exports.update_resource_by_customer_id = (req, res) => { - res.status(501).json({ - success: true, - error: null, - message: "NOT IMPLEMENTED", - }); -}; - -exports.delete_resource_by_id = (req, res) => { - let message; - let status = 200; - let id = req.user_id; - - Resources.remove({ - owner: id, - _id: req.params.resource_id, - }, (err) => { - if(err) { - message = `There was an error while deleting the resource: ${req.body.resource_id}.\nError: ${err.name}`; - status = 500; - } else { - message = "Resource deleted successfully"; - } - - // Housekeeping - if(req.body.resource_id) { - message += "\nNOTICE: This endpoint is being deprecated. Please pass 'resource_id' as the endpoint." - } - - res.status(status).json({ - success: !err, - error: err ? 
err : null, - message: message, - }) - }); -}; \ No newline at end of file diff --git a/services/api/controllers/verifyToken.js b/services/api/controllers/verifyToken.js deleted file mode 100644 index 604abd7..0000000 --- a/services/api/controllers/verifyToken.js +++ /dev/null @@ -1,47 +0,0 @@ -/** - * - * @fileoverview this file implements the jwt verification middleware. - * It expects the user send the token via the `x-access-token` header. - * It's dependent on the `jsonwebtoken` library. - * - */ - -"use strict"; - -const config = require('../config/config'); -let jwt = require('jsonwebtoken'); - -function verifyToken(req, res, next) { - let token = req.headers['x-access-token']; - if(!token) { - return res.status(401).json({ - success: false, - err: null, - message: 'No token provided.', - }); - } - - jwt.verify(token, config.JWT_KEY, (err, decoded) => { - if(err) { - return res.status(400).json({ - success: false, - error: err ? err : null, - message: 'Failed to authenticate with provided token.', - }); - } - // TODO: Deprecated. - if(decoded.id === undefined) { - return res.status(500).json({ - success: false, - error: null, - message: "This token is DEPRECATED. Please dump your db and generate new tokens at `/api/v1/auth/login`.", - }); - } - - // Save customer's unique id to the request object. - req.user_id = decoded.id; - next(); - }); -} - -module.exports = verifyToken; \ No newline at end of file diff --git a/services/api/models/customer_model.js b/services/api/models/customer_model.js deleted file mode 100644 index 89b75b3..0000000 --- a/services/api/models/customer_model.js +++ /dev/null @@ -1,54 +0,0 @@ -/** - * @fileoverview This file contains the definition of the customer schema. - * It is dependent on mongoose. - * @exports {customerSchema} The customer schema definition. - */ - - -"use strict"; - -let mongoose = require('mongoose'); -let Schema = mongoose.Schema; - -let customerSchema = new Schema({ - firstname: { - type: String, - required: true - }, - lastname: { - type: String, - required: true - }, - email: { - type: String, - required: true, - unique: true - }, - password: { - type: String, - required: true - }, - status: { - type: String, - required: true - }, - credits: { - type: Number, - required: true, - default: 20.0, - min: 0, - }, - createdOn: { - type: Date, - default: Date.now - }, - updatedOn: { - type: Date, - default: Date.now - }, -}); - -mongoose.model('Customers', customerSchema); // Register model -let customers = mongoose.model('Customers'); // instantiate model - -module.exports = customers; diff --git a/services/api/models/job_model.js b/services/api/models/job_model.js deleted file mode 100644 index 3394a38..0000000 --- a/services/api/models/job_model.js +++ /dev/null @@ -1,87 +0,0 @@ -/** - * @fileoverview This file contains the definition of the jobs schema. - * It is dependent on mongoose. - * @exports {jobSchema} The job schema definition. 
- */ - -"use strict"; - -let mongoose = require('mongoose'); -let Schema = mongoose.Schema; - -let jobSchema = new Schema({ - workers: { - type: String, - required: true - }, - cores: { - type: String, - required: true - }, - memory: { - type: String, - required: true - }, - source_files: { - type: [String], - required: true - }, - input_files: { - type: [String], - required: true - }, - status: { - type: String, - default: "PENDING", - required: true - }, - start_time: { - type: Date - }, - end_time: { - type: Date - }, - logs: { - type: [String] - }, - customer_id: { - type: mongoose.Schema.Types.ObjectId, - required: true - }, - //To keep track of the resources that are used to run the job - resources: [{ - resource_id: String, - price: Number - }], - timeslot_id: { - type: Number, - required: true - }, - price: { - type: Number, - required: true, - }, - created_on: { - type: Date, - default: Date.now, - required: true - }, - updated_on: { - type: Date, - default: Date.now - }, - created_by: { - type: mongoose.Schema.Types.ObjectId, - required: true - }, - updated_by: { - type: mongoose.Schema.Types.ObjectId, - required: true - } -}); - -// Create a model to interact with from the schema -mongoose.model('Jobs', jobSchema); -let jobs = mongoose.model('Jobs'); - -module.exports = jobs; diff --git a/services/api/models/pricing_model.js b/services/api/models/pricing_model.js deleted file mode 100644 index 2135e37..0000000 --- a/services/api/models/pricing_model.js +++ /dev/null @@ -1,54 +0,0 @@ -/** - * @fileoverview This file contains the definition of the pricing schema. - * It is dependent on mongoose. - * @exports {pricingSchema} The price schema definition. - */ - -"use strict"; - -let mongoose = require('mongoose'); -let Schema = mongoose.Schema; - -let priceSchema = new Schema({ - // Combination of time_slot and created_on fields will give the price details for that particular day - time_slot: { - type: Number, - required: true, - }, - cpus: { - type: Number, - required: true, - min: 0, - }, - gpus: { - type: Number, - required: true, - min: 0, - }, - memory: { - type: Number, - required: true, - min: 0, - }, - disk_space: { - type: Number, - required: true, - min: 0, - }, - createdOn: { - type: Date, - default: Date.now, - required: true - }, - updatedOn: { - type: Date, - default: Date.now - }, -}); -// TODO: Verify model works w/o the following. 
-//}, {collection: "pricing"}); - -mongoose.model('Price', priceSchema); -let prices = mongoose.model('Price'); - -module.exports = prices; diff --git a/services/api/models/resource_model.js b/services/api/models/resource_model.js deleted file mode 100644 index c09eaa2..0000000 --- a/services/api/models/resource_model.js +++ /dev/null @@ -1,77 +0,0 @@ -/** - * TODO -*/ - -let mongoose = require('mongoose'); -let Schema = mongoose.Schema; - -let resourceSchema = new Schema({ - ip_address: { - type: String, - trim: true, - unique: true, - required: true - }, - ram: { - type: Number, - min: 0, - required: true - }, - cores: { - type: Number, - min: 0, - required: true - }, - cpus: { - type: Number, - min: 0, - required: true - }, - gpus: { - type: Number, - min: 0, - }, - status: { - type: String, - required: true - }, - price: { - type: Number, - }, - active_from: { - type: Date, - default: Date.now - }, - active_to: { - type: Date, - default: Date.now - }, - owner: { - type: Schema.Types.ObjectId, - }, - createdOn: { - type:Date, - default: Date.now - }, - updatedOn: { - type: Date, - default: Date.now - }, - createdBy: { - type: Schema.Types.ObjectId, - required: true - }, - updatedBy: { - type: Schema.Types.ObjectId, - required: true - }, - machine_name: { - type: String, - required: true - } -}); - -mongoose.model('Resources', resourceSchema); // Register model -let resources = mongoose.model('Resources'); // Instantiate model - -module.exports = resources; \ No newline at end of file diff --git a/services/api/routes/auth_route.js b/services/api/routes/auth_route.js deleted file mode 100644 index edfac49..0000000 --- a/services/api/routes/auth_route.js +++ /dev/null @@ -1,15 +0,0 @@ - -"use strict"; - -let router = require('express').Router(); - -const config = require('../config/config'); -const verifyToken = require(`${config.CONTROLLERS_PATH}/verifyToken`); -const authController = require(`${config.CONTROLLERS_PATH}/auth_controller`); - -/* Create a new customer */ -router.post('/login', authController.login); - -router.post('/logout', verifyToken, authController.logout); - -module.exports = router; diff --git a/services/api/routes/customer_route.js b/services/api/routes/customer_route.js deleted file mode 100644 index c85a5e4..0000000 --- a/services/api/routes/customer_route.js +++ /dev/null @@ -1,28 +0,0 @@ -/** - * @fileoverview This file implements the routes for the customer endpoints. - * It route's extend from the `/customer` endpoint - * @exports {customer router} - */ - -"use strict"; - -let router = require('express').Router(); - -const config = require('../config/config.js'); -const verifyToken = require(`${config.CONTROLLERS_PATH}/verifyToken`); -let customer_controller = require(`${config.CONTROLLERS_PATH}/customer_controller`); - - -/* GET ALL RESOURCES */ -router.get('/', verifyToken, customer_controller.get_customer_by_id); - -// Create a new customer -router.post('/', customer_controller.addcustomer); - -// Update a customer's details -router.put('/', verifyToken, customer_controller.updateprofilebyid); - -// Delete a customer's account -router.delete('/', verifyToken, customer_controller.deletecustomerbyid); - -module.exports = router; diff --git a/services/api/routes/jobs_route.js b/services/api/routes/jobs_route.js deleted file mode 100644 index 048b00e..0000000 --- a/services/api/routes/jobs_route.js +++ /dev/null @@ -1,25 +0,0 @@ -/** - * - * @fileoverview This file defines the api endpoints for jobs. 
- * - */ - -"use strict"; - -const router = require('express').Router(); -const config = require('../config/config'); - -const verifyToken = require(`${config.CONTROLLERS_PATH}/verifyToken`); -const jobs_controller = require(`${config.CONTROLLERS_PATH}/jobs_controller`); - -router.get('/', verifyToken, jobs_controller.get_all_jobs); - -router.get('/:job_id', verifyToken, jobs_controller.get_job_by_job_id); - -router.post('/', verifyToken, jobs_controller.add_job); - -router.put('/:job_id', verifyToken, jobs_controller.update_job_status_by_job_id); - -router.delete('/:job_id', verifyToken, jobs_controller.delete_job_by_job_id); - -module.exports = router; diff --git a/services/api/routes/pricing_route.js b/services/api/routes/pricing_route.js deleted file mode 100644 index ec075a8..0000000 --- a/services/api/routes/pricing_route.js +++ /dev/null @@ -1,17 +0,0 @@ - -"use strict"; - -let express = require('express'); -let router = express.Router(); -const config = require('../config/config'); - -let verifyToken = require(`${config.CONTROLLERS_PATH}/verifyToken`); -let price_controller = require(`${config.CONTROLLERS_PATH}/pricing_controller`); - -router.get('/', price_controller.get_prices); - -// router.post('/', verifyToken, price_controller.add_price); - -// router.put('/price_id', verifyToken, price_controller); - -module.exports = router; diff --git a/services/api/routes/resource_route.js b/services/api/routes/resource_route.js deleted file mode 100644 index a306eda..0000000 --- a/services/api/routes/resource_route.js +++ /dev/null @@ -1,27 +0,0 @@ -/** - * - * @fileoverview This file defines the api endpoints for resources. - * - */ - -"use strict"; - -const router = require('express').Router(); -const config = require('../config/config'); - -const verifyToken = require(`${config.CONTROLLERS_PATH}/verifyToken`); -const resources_controller = require(`${config.CONTROLLERS_PATH}/resource_controller`); - -// TODO: really can't think of a use for this -// router.get('/', resources_controller.getallresources); - -router.get('/', verifyToken, resources_controller.get_resources_by_customer_id); - -router.post('/', verifyToken, resources_controller.add_resource_by_customer_id); - -router.put('/', verifyToken, resources_controller.update_resource_by_customer_id); - -router.delete('/', verifyToken, resources_controller.delete_resource_by_id); -router.delete('/:resource_id', verifyToken, resources_controller.delete_resource_by_id); - -module.exports = router; diff --git a/services/api/script_mongodb.txt b/services/api/script_mongodb.txt deleted file mode 100644 index 9736343..0000000 --- a/services/api/script_mongodb.txt +++ /dev/null @@ -1,164 +0,0 @@ -0. Database structure - -*: required attribute - -The attribute names are different from the below description. 
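(For example: "First name", "Last name", and "Email id" below correspond to the Mongoose fields firstname, lastname, and email defined in services/api/models/customer_model.js.)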
- -Collection: - -*First name -*Last name -*Email id -*Password -*Object_id (Unique, auto-generated by MongoDB) -*status -*createdOn -updatedOn - -Collection: - -*Object_id (Unique, auto-generated by MongoDB) -*Workers -*Cores -*Memory -*Source_files -*input_files -*Status -*IP_address -Start_time -End_Time -Logs -*CustomerID : Ref to customer -resource [id, Price] : Ref to resource -*createdOn -updatedOn -*createdBy -updatedBy - - -Collection: - -*Object_id (Unique, auto-generated by MongoDB) -*IP address -*Ram -*Cores  -CPUs -GPUs  -*Status -*Price -*Active_from -Active_to -owner : Ref to customer -*createdOn -updatedOn -*createdBy -updatedBy - - -Create Customer Collection - -use ShareResources - -db.createCollection("customer", { validator: { $jsonSchema: { bsonType: "object", required: [ "firstname", "lastname", "emailid", "password", "createdOn", "status"], properties: { firstname: { bsonType: "string", description: "must be a string and is required" }, lastname: { bsonType: "string", description: "must be a string and is required" }, emailid: { bsonType: "string", description: "must be a string and is required" }, password: { bsonType: "string", description: "must be a string and is required" }, createdOn: { bsonType: "date", description: "must be a date and is required" }, updatedOn: { bsonType: "date", description: "must be a date and is not required" }, status: { bsonType: "string", description: "must be a string and it should be either Active, InActive or HDFS_InActive and is required" } } } } }) - -2. Insert Customer - -db.customer.insertOne({firstname:"Haritha",lastname:"Munagala",emailid:"mharitha@pdx.edu",password:"Passw0rd", createdOn:new Date(Date.now()), status:"Active"}) - -3. Show the result - -db.customer.find() - -Result: - -{ "_id" : ObjectId("5ababf6de538724ad170b528"), "firstname" : "Haritha", "lastname" : "Munagala", "emailid" : "mharitha@pdx.edu", "password" : "Passw0rd", "createdOn" : ISODate("2018-03-27T22:02:21.946Z"), "status" : "Active" } - -4. Create Resource Collection - -db.createCollection("resources", { validator: { $jsonSchema: { bsonType: "object", required: [ "ip_address", "ram", "cores", "status" ,"price", "active_from", "createdOn", "createdBy"], properties: { ip_address: { bsonType: "string", description: "must be a string and is required" }, ram: { bsonType: "int", description: "must be an int and is not required" }, cores: { bsonType: "int", description: "must be an int and is required" }, cpus: { bsonType: "int", description: "must be an int" }, gpus: { bsonType: "int", description: "must be an int" }, status: { bsonType: "string", description: "must be a string and it should be either Offline or Online and is required" }, price: { bsonType: "int", description: "must be an int and is required" }, active_from: { bsonType: "date", description: "must be a date and is required" }, active_to: { bsonType: "date", description: "must be a date" }, owner: { bsonType: "objectId", description: "must be an objectId" }, createdOn: { bsonType: "date", description: "must be a date and is required" }, updatedOn: { bsonType: "date", description: "must be a date and is not required" }, createdBy: { bsonType: "objectId", description: "must be an objectId and is required" }, updatedBy: { bsonType: "objectId", description: "must be an objectId and is not required" }}}}}) - -5. 
Insert Resource - WriteError: Document failed validation - -db.resources.insertOne({ ip_address: "10.456.345.566", ram: NumberInt(2), cores: NumberInt(2), status: "Offline", price: NumberInt(15), active_from: new Date(Date.now()), createdOn: new Date(Date.now()), createdBy: ObjectId("5abab56ee538724ad170b523") }) - -6. Show Resource - -db.resources.find() - -Result-> - -{ "_id" : ObjectId("5abb10a2e538724ad170b534"), "ip_address" : "10.456.345.566", "ram" : 2, "cores" : 2, "status" : "Offline", "price" : 15, "active_from" : ISODate("2018-03-28T03:48:50.766Z"), "createdOn" : ISODate("2018-03-28T03:48:50.766Z"), "createdBy" : ObjectId("5abab56ee538724ad170b523") } - -7. Create Job Collection - -db.createCollection("job", { validator: { $jsonSchema: { bsonType: "object", required: ["ip_address", "workers", "cores", "memory", "source_files", "input_files", "status", "customerId", "resources", "createdOn", "createdBy"], properties: { ip_address: { bsonType: "string", description: "must be a string and is required"}, workers: { bsonType: "int", description: "must be an int and is required"}, cores: { bsonType: "int", description: "must be an int and is required"}, memory: { bsonType: "int", description: "must be an int and is required" }, source_files: { bsonType: "string", description: "must be a string and is required" }, input_files: { bsonType: "string", description: "must be a string and is required" }, status: { bsonType: "string", description: "must be a string and it should be either Offline or Online and is required" }, logs: { bsonType: "string", description: "must be a string" }, start_time: { bsonType: "date", description: "must be a date" }, end_time: { bsonType: "date", description: "must be a date" }, customerId: { bsonType: "objectId", description: "must be an objectId" }, resources: { bsonType: ["array"], items: { properties: {resourceId: { bsonType: "objectId" }, resourcePrice: { bsonType: "int" } }} }, createdOn: { bsonType: "date", description: "must be a date and is required" }, updatedOn: { bsonType: "date", description: "must be a date and is not required" }, createdBy: { bsonType: "objectId", description: "must be an objectId and is required" }, updatedBy: { bsonType: "objectId", description: "must be an objectId and is not required" }}}}}) - -8. Insert Job - WriteError: Document failed validation - -db.job.insertOne({workers: NumberInt(2), cores: NumberInt(3), memory: NumberInt(100), source_files: "1.js", input_files: "2.js", status: "Online", ip_address: "10.444.345.566", customerId: ObjectId("5abab56ee538724ad170b523"), resources: [{resourceId: ObjectId("5abab56ee538724ad170b523"), resourcePrice: NumberInt(10)}, {resourceId: ObjectId("5abab76ee538724ad170b523"), resourcePrice: NumberInt(15)}], createdOn: new Date(Date.now()), createdBy: ObjectId("5abab56ee538724ad170b523") }) - -9. Show Job - -db.job.find() -Result -> - -{ "_id" : ObjectId("5abb10dae538724ad170b535"), "workers" : 2, "cores" : 3, "memory" : 100, "source_files" : "1.js", "input_files" : "2.js", "status" : "Online", "ip_address" : "10.444.345.566", "customerId" : ObjectId("5abab56ee538724ad170b523"), "resources" : [ { "resourceId" : ObjectId("5abab56ee538724ad170b523"), "resourcePrice" : 10 }, { "resourceId" : ObjectId("5abab76ee538724ad170b523"), "resourcePrice" : 15 } ], "createdOn" : ISODate("2018-03-28T03:49:46.942Z"), "createdBy" : ObjectId("5abab56ee538724ad170b523") } - -10. 
Update customer - 1) add, delete, read by keys - -1) Update - -db.customer.update( - { "_id" : ObjectId("5ababf6de538724ad170b528")}, - { - $set: { "password": "Passw4rd" }, - $currentDate: { "updatedOn": true } - } -) - -db.customer.find() - -{ "_id" : ObjectId("5ababf6de538724ad170b528"), "firstname" : "Haritha", "lastname" : "Munagala", "emailid" : "mharitha@pdx.edu", "password" : "Passw4rd", "createdOn" : ISODate("2018-03-27T22:02:21.946Z"), "status" : "Active", "updatedOn" : ISODate("2018-03-28T08:32:08.301Z") } - -2) Delete - -db.customer.remove( { "_id" : ObjectId("5ababf6de538724ad170b528") } ) -db.customer.remove( { "status" : "Active" } ) - -3) Read by keys - -db.customer.find({ "_id" : ObjectId("5abb54b6e538724ad170b536")}) - -11. Update resources - -db.resources.update( - { "_id" : ObjectId("5abb10a2e538724ad170b534")}, - { - $set: { "updatedBy": ObjectId("5abb10a2e538724ad170b534") }, - $currentDate: { "updatedOn": true } - } -) -db.resources.find() - -{ "_id" : ObjectId("5abb10a2e538724ad170b534"), "ip_address" : "10.456.345.566", "ram" : 2, "cores" : 2, "status" : "Offline", "price" : 15, "active_from" : ISODate("2018-03-28T03:48:50.766Z"), "createdOn" : ISODate("2018-03-28T03:48:50.766Z"), "createdBy" : ObjectId("5abab56ee538724ad170b523"), "updatedBy" : ObjectId("5abb10a2e538724ad170b534"), "updatedOn" : ISODate("2018-03-28T08:46:29.327Z") } - -12. Update job - -db.job.find() - -{ "_id" : ObjectId("5abb10dae538724ad170b535"), "workers" : 2, "cores" : 3, "memory" : 100, "source_files" : "1.js", "input_files" : "2.js", "status" : "Online", "ip_address" : "10.444.345.566", "customerId" : ObjectId("5abab56ee538724ad170b523"), "resources" : [ { "resourceId" : ObjectId("5abab56ee538724ad170b523"), "resourcePrice" : 10 }, { "resourceId" : ObjectId("5abab76ee538724ad170b523"), "resourcePrice" : 15 } ], "createdOn" : ISODate("2018-03-28T03:49:46.942Z"), "createdBy" : ObjectId("5abab56ee538724ad170b523") } - -db.job.update({ "_id" : ObjectId("5abb10dae538724ad170b535") }, { $set: { "resources" : [{ "resourceId" : ObjectId("5abb10a2e538724ad170b534"), "resourcePrice" : NumberInt(20) }] } }) - -13.Create Pricing Collection -db.createCollection("pricing", { validator:{ $jsonSchema:{ bsonType: "object",required: ["time_slot", "cpus", "gpus", "memory","disk_space","createdOn","updatedOn"],properties: { time_slot: { bsonType: "int", description:"must be a int and its value must be 0-3 inclusive"}, cpus: { bsonType: "double", description: "must be an floating number and is required"}, gpus: { bsonType: "double", description: "must be an double and is required"}, memory: { bsonType: "double", description: "must be an double and is required" }, disk_space: { bsonType: "double", description: "must be a double and is required" },createdOn: { bsonType: "date", description: "must be a date and is required" }, updatedOn: { bsonType: "date", description: "must be a date and is not required" }, createdBy: { bsonType: "objectId", description: "must be an objectId and is required" }, updatedBy: { bsonType: "objectId", description: "must be an objectId and is not required" }}}}}) - -14. Insert Pricing data -db.pricing.insertOne({time_slot:NumberInt(0), cpus:0.001,gpus:0.001,memory:00001, disk_space:0.001,createdOn:new Date(Date.now()),updatedOn: new Date(Date.now())}) - -15. 
Delete collections - - 1) db.job.drop() - 2) db.customer.drop() - 3) db.resources.drop() - 4) db.pricing.drop() diff --git a/services/app.js b/services/app.js deleted file mode 100644 index c36bc94..0000000 --- a/services/app.js +++ /dev/null @@ -1,109 +0,0 @@ -"use strict"; - -let express = require('express'); // Express server -let bodyParser = require('body-parser'); // HTTP body parsing middleware giving us access to `req.body` -let morgan = require('morgan'); // Logging middleware - -const config = require('./api/config/config.js'); // Configuration details -const db = require('./db.js'); -const resources = require(`${config.ROUTES_PATH}/resource_route.js`); // Resource endpoints -const jobs = require(`${config.ROUTES_PATH}/jobs_route.js`); // Job endpoints -const customer = require(`${config.ROUTES_PATH}/customer_route.js`); // Customer endpoints -const authenticate = require(`${config.ROUTES_PATH}/auth_route.js`); // Authentication endpoints -const pricing = require(`${config.ROUTES_PATH}/pricing_route`); // Pricing endpoints -const DEBUG = process.env.DEBUG || true; // flag for verbose console output - -// Strategically defines servers port in order of test, environment variable, and finally hardcoded. -const PORT = process.env.API_TEST ? 1234 : process.env.SERVER_PORT || 8080; - -let app = express(); -let router = express.Router(); - -// Show extended output in debug mode -let log_level = DEBUG ? "dev" : "tiny"; - -// Instantiate middleware -app.use(morgan(log_level)); -app.use(bodyParser.json()); -app.use(bodyParser.urlencoded({extended: false})); - -// Extend default endpoint to `/api/v1/` -app.use(config.API_ENDPOINT, router); - -// Redirect req's from `/` to `/api/v1/` -app.get('/', (req, res) => { - res.redirect(config.API_ENDPOINT) -}); - -// Display message at `/api/v1/` if the server is online -app.get(config.API_ENDPOINT, (req, res) => { - res.send("
API is: Online
"); -}); - -// Define the server's endpoints -router.use('/auth', authenticate); -router.use('/account', customer); -router.use('/resources', resources); -router.use('/jobs', jobs); -router.use('/pricing', pricing); - - -db.open_connection(config.DB_URI, DEBUG); - -// If the Node process ends, close the Mongoose connection -process.on('SIGINT', () => { - db.close_connection(); - process.exit(0); -}); - -/** - * This function should be used as a no-op placeholder. - * `()=>{}` is effectively equivalent to `Function.prototype`, - * however, it's heavily optimized in V8. - */ -const noop = ()=>{}; - -app.listen(PORT, () => { - DEBUG ? console.log(`Application open on port: ${PORT}.`) : noop; -}); - -/** - * This function should be used primarily by the test harness for the purpose - * of creating a new server. - * - * @returns {Promise} A promise that resolves the new server. - */ -let create_server = () => { - return new Promise((resolve, reject) => { - try { - app.listen(PORT, () => { - DEBUG ? console.log(`Application open on port: ${PORT}.`) : noop; - resolve(app); - }) - } catch (err) { - reject(err); - } - }); -}; - -/** - * This function should be used by the test harness for the purpose - * of forcefully stopping the server. - */ -let stop_server = (server) => { - return new Promise((resolve, reject) => { - try { - console.log(typeof server); - server.close(() => { - DEBUG ? console.log("Closing server.") : noop; - resolve(process.exit(0)); - }); - } catch(err) { - reject(err); - } - }); -}; - -// Expose server to external resources -module.exports.create = create_server; -module.exports.close = stop_server; diff --git a/services/db.js b/services/db.js deleted file mode 100644 index d491e65..0000000 --- a/services/db.js +++ /dev/null @@ -1,50 +0,0 @@ - -"use strict"; - -const config = require('./api/config/config'); -let mongoose = require('mongoose'); - -mongoose.set('bufferCommands', false); - - -// TODO: refactor so this is more OOP-esque... -let connect = async (db_name, debug=false) => { - - try { - // TODO: i.e. add this to the constructor - // Log mongoose events - ['open', 'disconnected'].forEach(db_event => { - mongoose.connection.on(db_event, () => { - if(debug) { - console.log(`NOTICE: Database is now ${db_event}.`); - } - }); - }); - - await mongoose.connect(db_name); - } catch(err) { - console.error(`ERROR: ${err}`); - - // Can't do much without a db connection; exit. - process.exit(1) - } finally { - process.on('SIGINT', () => { - mongoose.connection.close(() => { - if(debug) { - console.log('NOTICE: Closing database connection'); - } - process.exit(0); - }); - }); - } -}; - -// TODO: Add this to the destructor... 
-let close = () => { - mongoose.connection.close(() => { - console.log('NOTICE: Closing database connection'); - }) -}; - -module.exports.open_connection = connect; -module.exports.close_connection = close; \ No newline at end of file diff --git a/services/package.json b/services/package.json deleted file mode 100644 index 5b4b7e8..0000000 --- a/services/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "shareresources_services", - "version": "0.1.1", - "description": "", - "main": "app.js", - "directories": { - "doc": "docs" - }, - "dependencies": { - "bcrypt": "^3.0.0", - "express": "^4.16.2", - "jsonwebtoken": "^8.2.0", - "mongoose": "^5.1.1", - "morgan": "^1.9.0" - }, - "devDependencies": { - "body-parser": "^1.18.2", - "chai": "^4.1.2", - "chai-http": "^4.0.0", - "mocha": "^5.0.1", - "path": "^0.12.7", - "request": "^2.85.0", - "should": "^13.2.1" - }, - "scripts": { - "test": "./node_modules/.bin/mocha --exit" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/shared-systems/api.git" - }, - "author": "Susham Kumar", - "license": "ISC", - "bugs": { - "url": "https://github.com/shared-systems/api/issues" - }, - "homepage": "https://github.com/shared-systems/api#readme" -} diff --git a/services/test/test_account.js b/services/test/test_account.js deleted file mode 100644 index 32cc58c..0000000 --- a/services/test/test_account.js +++ /dev/null @@ -1,211 +0,0 @@ -"use strict"; - -let chai = require('chai'); -let chai_http = require('chai-http'); -let expect = chai.expect; - -chai.should(); -chai.use(chai_http); - -process.env.test = true; - -describe('Customer Account Interaction', function() { - const CUSTOMER_PAYLOAD = { - firstname: "Felix", - lastname: "Da Housecat", - email: "abc@123.com", - password: "password", - }; - - var server; - beforeEach("Instantiate server", () => { - server = require('../app').server; - }); - - afterEach("Tear down server", () => { - require('../app').stop(); - }); - - describe('Account Management', function() { - it('should add a new user account', function(done) { - chai.request(server).post('/api/v1/account') - .send(CUSTOMER_PAYLOAD) - .end(function(err, res) { - res.should.have.status(200); - res.body.should.be.a('object'); - // noinspection BadExpressionStatementJS - expect(res).to.be.json; - - ["success", "message", "error"].forEach(val => { - res.body.should.have.a.property(val); - }); - res.body.success.should.be.eql(true); - // expect(res.body.error).to.be.null(); - CUSTOMER_PAYLOAD.token = res.body.token; - done(); - }); - }); - it('should remove the customer by their unique `_id` attribute', function(done) { - chai.request(server).delete(`/api/v1/account/`) - .set('x-access-token', CUSTOMER_PAYLOAD.token) - .end(function(err, res) { - res.should.have.status(200); - res.body.should.be.a('object'); - // noinspection BadExpressionStatementJS - expect(res).to.be.json; - - ["success", "message", "error"].forEach(val => { - res.body.should.have.a.property(val); - }); - res.body.success.should.be.eql(true); - done(); - }); - }); - }); - - describe('Improper Account Management', function() { - //TODO: set this up to use before and after hooks instead of describe scenarios - describe('set up db for checking duplicate errors', function() { - it('should create a new user account', function (done) { - chai.request(server).post('/api/v1/account') - .send(CUSTOMER_PAYLOAD) - .end(function(err, res) { - res.should.have.status(200); - res.body.success.should.be.eql(true); - CUSTOMER_PAYLOAD.token = res.body.token; - done(); - }); - 
}); - }); - - describe('verify failures', function() { - it('should fail to add the same user account', function(done) { - chai.request(server).post('/api/v1/account') - .send(CUSTOMER_PAYLOAD) - .end(function(err, res) { - res.should.have.status(400); - res.body.should.be.a('object'); - // noinspection BadExpressionStatementJS - expect(res).to.be.json; - - ["success", "message", "error"].forEach(val => { - res.body.should.have.a.property(val); - }); - res.body.success.should.be.eql(false); - // expect(res.body.error).to.be.null(); - res.body.error.code.should.be.eql(11000); - done(); - }); - }); - it('should remove the customer by their unique `_id` successfully', function(done) { - chai.request(server) - .delete(`/api/v1/account/`) - .set("x-access-token", CUSTOMER_PAYLOAD.token) - .end(function(err, res) { - res.should.have.status(200); - res.body.success.should.be.eql(true); - done(); - }); - }); - it('should remove the customer by their unique `_id` unsuccessfully', function(done) { - chai.request(server) - .delete(`/api/v1/account/`) - .set("x-access-token", CUSTOMER_PAYLOAD.token) - .end(function(err, res) { - res.should.have.status(400); - res.body.should.be.a('object'); - // noinspection BadExpressionStatementJS - expect(res).to.be.json; - - ["success", "message", "error"].forEach(val => { - res.body.should.have.a.property(val); - }); - res.body.success.should.be.eql(false); - done(); - }); - }); - }); - }); -}); - -/* -describe('/GET Resources', () => { - it('it should GET all the resources', (done) => { - chai.request(server) - .get('api/v1/resources') - .end((err, res, body) => { - //res.should.have.status(200); - //res.body.length.should.be.eql(0); - //expect(res.statusCode).to.equal(200); - console.log(body); - done(); - }); - }); - }); -*/ - -/* describe('/POST book', () => { - it('it should not POST a book without pages field', (done) => { - let book = { - title: "The Lord of the Rings", - author: "J.R.R. Tolkien", - year: 1954 - } - chai.request(server) - .post('/book') - .send(book) - .end((err, res) => { - res.should.have.status(200); - res.body.should.be.a('object'); - res.body.should.have.property('errors'); - res.body.errors.should.have.property('pages'); - res.body.errors.pages.should.have.property('kind').eql('required'); - done(); - }); - }); - it('it should POST a book ', (done) => { - let book = { - title: "The Lord of the Rings", - author: "J.R.R. Tolkien", - year: 1954, - pages: 1170 - } - chai.request(server) - .post('/book') - .send(book) - .end((err, res) => { - res.should.have.status(200); - res.body.should.be.a('object'); - res.body.should.have.property('message').eql('Book successfully added!'); - res.body.book.should.have.property('title'); - res.body.book.should.have.property('author'); - res.body.book.should.have.property('pages'); - res.body.book.should.have.property('year'); - done(); - }); - }); - }); - /* - * Test the /GET/:id route - */ - /* describe('/GET/:id book', () => { - it('it should GET a book by the given id', (done) => { - let book = new Book({ title: "The Lord of the Rings", author: "J.R.R. 
Tolkien", year: 1954, pages: 1170 }); - book.save((err, book) => { - chai.request(server) - .get('/book/' + book.id) - .send(book) - .end((err, res) => { - res.should.have.status(200); - res.body.should.be.a('object'); - res.body.should.have.property('title'); - res.body.should.have.property('author'); - res.body.should.have.property('pages'); - res.body.should.have.property('year'); - res.body.should.have.property('_id').eql(book.id); - done(); - }); - }); - - }); - });*/ \ No newline at end of file diff --git a/services/test/test_auth.js b/services/test/test_auth.js deleted file mode 100644 index 2b14db7..0000000 --- a/services/test/test_auth.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; - -let chai = require('chai'); -let chai_http = require('chai-http'); -let expect = chai.expect; -let mongoose = require("mongoose"); - -chai.should(); -chai.use(chai_http); - -process.env.test = true; - -describe("Customer Authentication", function() { - var server; - const customer_payload = { - firstname: "Felix", - lastname: "Da Housecat", - email: "abc@123.com", - password: "password", - }; - - // TODO: Need to have a separate DB for testing and hardcode this guy in there. - before("Add a user account to the database to interact with", function() { - console.log("Creating user account"); - }); - after("Remove the user account from the database", function() { - console.log("Removing user account"); - }); - - // And of course we need to setup/teardown our server. - beforeEach("Instantiate server", () => { - server = require('../app').server; - console.log("Creating clean server for test."); - }); - - afterEach("Tear down server", () => { - require('../app').stop(); - console.log("Removing server from test."); - }); - - it("should allow the user to login", function(done) { - done(); - }); - it("should allow the user the logout", function(done) { - done(); - }); -}); \ No newline at end of file diff --git a/services/test/test_config.js b/services/test/test_config.js deleted file mode 100644 index 3179482..0000000 --- a/services/test/test_config.js +++ /dev/null @@ -1,50 +0,0 @@ -/** - * @fileoverview This file contains the test definitions for the configuration details. - */ - -"use strict"; - -let chai = require('chai'); -let chai_http = require('chai-http'); -let expect = chai.expect; - -chai.should(); -chai.use(chai_http); - -process.env.test = true; - -/** - * Tests the `api/config/config.js` file, ensuring that it - * has the minimum configuration details to stand the server up. - */ -describe("config", function() { - const config = require("../api/config/config"); - describe("constants", function() { - it("should have an api endpoint extension", function(done) { - let endpoint = config.API_ENDPOINT; - expect(endpoint).to.be.a("string"); - expect(endpoint, "/api/v1").to.equal("/api/v1"); - done(); - }); - - it("should have a database connection uri", function(done) { - let db_uri = config.DB_URI; - expect(db_uri).to.be.a("string"); - expect(db_uri.split('/')).to.have.lengthOf(4); - expect(db_uri.split('/')[3], "ShareResources").to.equal('ShareResources'); - done(); - }); - - it("should maintain an integer >= 10 for the number of bcrypt salt rounds", function(done) { - let salts = config.SALT_ROUNDS; - expect(salts).to.be.a('number'); - expect(salts).to.be.above(9); - done(); - }); - }); - - describe("constant paths", function() { - // TODO: mehhh... probably need to verify paths/dirs exist or something. 
- // it("should have an api folder") - }); -}); \ No newline at end of file diff --git a/spark-conf/default.conf b/spark-conf/default.conf new file mode 100644 index 0000000..f7e4e5c --- /dev/null +++ b/spark-conf/default.conf @@ -0,0 +1,16 @@ +server { + listen 80 default_server; + server_name http://localhost; + + ## Enable ModSecurity + modsecurity on; + modsecurity_rules_file /etc/nginx/conf.d/recommended.conf; + + location ~ /\.ht { + deny all; + } + + location ~ /\.git { + deny all; + } +} \ No newline at end of file diff --git a/spark-conf/recommended.conf b/spark-conf/recommended.conf new file mode 100644 index 0000000..3a87416 --- /dev/null +++ b/spark-conf/recommended.conf @@ -0,0 +1,225 @@ +# -- Rule engine initialization ---------------------------------------------- + +# Enable ModSecurity, attaching it to every transaction. Use detection +# only to start with, because that minimises the chances of post-installation +# disruption. +# +SecRuleEngine DetectionOnly + + +# -- Request body handling --------------------------------------------------- + +# Allow ModSecurity to access request bodies. If you don't, ModSecurity +# won't be able to see any POST parameters, which opens a large security +# hole for attackers to exploit. +# +SecRequestBodyAccess On + + +# Enable XML request body parser. +# Initiate XML Processor in case of xml content-type +# +SecRule REQUEST_HEADERS:Content-Type "(?:application(?:/soap\+|/)|text/)xml" \ + "id:'200000',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=XML" + +# Enable JSON request body parser. +# Initiate JSON Processor in case of JSON content-type; change accordingly +# if your application does not use 'application/json' +# +SecRule REQUEST_HEADERS:Content-Type "application/json" \ + "id:'200001',phase:1,t:none,t:lowercase,pass,nolog,ctl:requestBodyProcessor=JSON" + +# Maximum request body size we will accept for buffering. If you support +# file uploads then the value given on the first line has to be as large +# as the largest file you are willing to accept. The second value refers +# to the size of data, with files excluded. You want to keep that value as +# low as practical. +# +SecRequestBodyLimit 13107200 +SecRequestBodyNoFilesLimit 131072 + +# Store up to 128 KB of request body data in memory. When the multipart +# parser reaches this limit, it will start using your hard disk for +# storage. That is slow, but unavoidable. +# +SecRequestBodyInMemoryLimit 131072 + +# What do do if the request body size is above our configured limit. +# Keep in mind that this setting will automatically be set to ProcessPartial +# when SecRuleEngine is set to DetectionOnly mode in order to minimize +# disruptions when initially deploying ModSecurity. +# +SecRequestBodyLimitAction Reject + +# Verify that we've correctly processed the request body. +# As a rule of thumb, when failing to process a request body +# you should reject the request (when deployed in blocking mode) +# or log a high-severity alert (when deployed in detection-only mode). +# +SecRule REQBODY_ERROR "!@eq 0" \ +"id:'200002', phase:2,t:none,log,deny,status:400,msg:'Failed to parse request body.',logdata:'%{reqbody_error_msg}',severity:2" + +# By default be strict with what we accept in the multipart/form-data +# request body. If the rule below proves to be too strict for your +# environment consider changing it to detection-only. You are encouraged +# _not_ to remove it altogether. 
+# +SecRule MULTIPART_STRICT_ERROR "!@eq 0" \ +"id:'200003',phase:2,t:none,log,deny,status:400, \ +msg:'Multipart request body failed strict validation: \ +PE %{REQBODY_PROCESSOR_ERROR}, \ +BQ %{MULTIPART_BOUNDARY_QUOTED}, \ +BW %{MULTIPART_BOUNDARY_WHITESPACE}, \ +DB %{MULTIPART_DATA_BEFORE}, \ +DA %{MULTIPART_DATA_AFTER}, \ +HF %{MULTIPART_HEADER_FOLDING}, \ +LF %{MULTIPART_LF_LINE}, \ +SM %{MULTIPART_MISSING_SEMICOLON}, \ +IQ %{MULTIPART_INVALID_QUOTING}, \ +IP %{MULTIPART_INVALID_PART}, \ +IH %{MULTIPART_INVALID_HEADER_FOLDING}, \ +FL %{MULTIPART_FILE_LIMIT_EXCEEDED}'" + +# Did we see anything that might be a boundary? +# +SecRule MULTIPART_UNMATCHED_BOUNDARY "!@eq 0" \ +"id:'200004',phase:2,t:none,log,deny,msg:'Multipart parser detected a possible unmatched boundary.'" + +# PCRE Tuning +# We want to avoid a potential RegEx DoS condition +# +SecPcreMatchLimit 1000 +SecPcreMatchLimitRecursion 1000 + +# Some internal errors will set flags in TX and we will need to look for these. +# All of these are prefixed with "MSC_". The following flags currently exist: +# +# MSC_PCRE_LIMITS_EXCEEDED: PCRE match limits were exceeded. +# +SecRule TX:/^MSC_/ "!@streq 0" \ + "id:'200005',phase:2,t:none,deny,msg:'ModSecurity internal error flagged: %{MATCHED_VAR_NAME}'" + + +# -- Response body handling -------------------------------------------------- + +# Allow ModSecurity to access response bodies. +# You should have this directive enabled in order to identify errors +# and data leakage issues. +# +# Do keep in mind that enabling this directive does increase both +# memory consumption and response latency. +# +SecResponseBodyAccess On + +# Which response MIME types do you want to inspect? You should adjust the +# configuration below to catch documents but avoid static files +# (e.g., images and archives). +# +SecResponseBodyMimeType text/plain text/html text/xml + +# Buffer response bodies of up to 512 KB in length. +SecResponseBodyLimit 524288 + +# What happens when we encounter a response body larger than the configured +# limit? By default, we process what we have and let the rest through. +# That's somewhat less secure, but does not break any legitimate pages. +# +SecResponseBodyLimitAction ProcessPartial + + +# -- Filesystem configuration ------------------------------------------------ + +# The location where ModSecurity stores temporary files (for example, when +# it needs to handle a file upload that is larger than the configured limit). +# +# This default is chosen because all systems have /tmp available; however, +# it is less than ideal. It is recommended that you specify a private location. +# +SecTmpDir /tmp/ + +# The location where ModSecurity will keep its persistent data. This default +# is chosen because all systems have /tmp available; however, it +# too should be updated to a place that other users can't access. +# +SecDataDir /tmp/ + + +# -- File uploads handling configuration ------------- + +# The location where ModSecurity stores intercepted uploaded files. This +# location must be private to ModSecurity. You don't want other users on +# the server to access the files, do you? +# +#SecUploadDir /opt/modsecurity/var/upload/ + +# By default, only keep the files that were determined to be unusual +# in some way (by an external inspection script). For this to work you +# will also need at least one file inspection rule. 
+# +#SecUploadKeepFiles RelevantOnly + +# Uploaded files are by default created with permissions that do not allow +# any other user to access them. You may need to relax that if you want to +# interface ModSecurity to an external program (e.g., an anti-virus). +# +#SecUploadFileMode 0600 + + +# -- Debug log configuration ------------------------------------------------- + +# The default debug log configuration is to duplicate the error, warning +# and notice messages from the error log. +# +#SecDebugLog /opt/modsecurity/var/log/debug.log +#SecDebugLogLevel 3 + + +# -- Audit log configuration ------------------------------------------------- + +# Log the transactions that are marked by a rule, as well as those that +# trigger a server error (determined by a 5xx or 4xx, excluding 404, +# level response status codes). +# +SecAuditEngine RelevantOnly +SecAuditLogRelevantStatus "^(?:5|4(?!04))" + +# Log everything we know about a transaction. +SecAuditLogParts ABIJDEFHZ + +# Use a single file for logging. This is much easier to look at, but +# assumes that you will use the audit log only occasionally. +# +SecAuditLogType Serial +SecAuditLog /var/log/modsec_audit.log + +# Specify the path for concurrent audit logging. +#SecAuditLogStorageDir /opt/modsecurity/var/audit/ + + +# -- Miscellaneous ----------------------------------------------------------- + +# Use the most commonly used application/x-www-form-urlencoded parameter +# separator. There's probably only one application somewhere that uses +# something else, so don't expect to change this value. +# +SecArgumentSeparator & + +# Settle on version 0 (zero) cookies, as that is what most applications +# use. Using an incorrect cookie version may open your installation to +# evasion attacks (against the rules that examine named cookies). +# +SecCookieFormat 0 + +# Specify your Unicode Code Point. +# This mapping is used by the t:urlDecodeUni transformation function +# to properly map encoded data to your language. Properly setting +# these directives helps to reduce false positives and negatives. +# +SecUnicodeMapFile unicode.mapping 20127 + +# Improve the quality of ModSecurity by sharing information about your +# current ModSecurity version and dependency versions. +# The following information will be shared: ModSecurity version, +# Web Server version, APR version, PCRE version, Lua version, Libxml2 +# version, Anonymous unique id for host. +SecStatusEngine On
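A closing editorial note on `spark-conf/recommended.conf`: the file deliberately ships with `SecRuleEngine DetectionOnly`, so matched rules are only recorded in the audit log at `/var/log/modsec_audit.log` rather than blocked. Once that log has been reviewed and noisy rules tuned, flipping the engine to blocking mode is the usual next step. A minimal sketch of that change follows, assuming tuning is complete; the excluded rule id is hypothetical and not part of this configuration:

```
# Sketch of the post-tuning step this file's own comments describe (assumption: tuning complete).
# Switch the engine from logging-only to blocking so matched rules deny requests.
SecRuleEngine On

# Illustrative only: exclude a rule id that proved noisy during the DetectionOnly phase.
# The id 999999 is hypothetical, not from this configuration.
SecRuleRemoveById 999999
```

Since `default.conf` loads this file via `modsecurity_rules_file /etc/nginx/conf.d/recommended.conf`, the change takes effect when nginx reloads its configuration.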